--- /dev/null
+debian/changelog merge=dpkg-mergechangelogs
--- /dev/null
+ikiwiki.setup
+Makefile
+Makefile.old
+blib/*
+/cover_db
+doc/.ikiwiki/*
+html/*
+ikiwiki.out
+ikiwiki-transition.out
+ikiwiki-calendar.out
+pm_to_blib
+/MYMETA.yml
+*.man
+/po/cover_db
+po/po2wiki_stamp
+po/underlays/*/*.mdwn
+po/underlays/basewiki/*/*.mdwn
+po/underlays/basewiki/*/*/*.mdwn
+po/underlays/directives/ikiwiki/directive/*.mdwn
+po/underlays_copy_stamp
+underlays/locale
+/t/tmp/
htmllink readfile writefile pagetype srcfile pagename
displaytime strftime_utf8 will_render gettext ngettext urlto targetpage
add_underlay pagetitle titlepage linkpage newpagefile
- inject add_link add_autofile
+ inject add_link add_autofile useragent
%config %links %pagestate %wikistate %renderedfiles
%pagesources %destsources %typedlinks);
our $VERSION = 3.00; # plugin interface version, next is ikiwiki version
safe => 0,
rebuild => 0,
},
+ cookiejar => {
+ type => "string",
+ default => "$ENV{HOME}/.ikiwiki/cookies",
+ description => "cookie control",
+ example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
+ safe => 0, # hooks into perl module internals
+ rebuild => 0,
+ },
}
sub defaultconfig () {
$autofiles{$file}{generator}=$generator;
}
+# Return a new LWP::UserAgent configured consistently for all of
+# ikiwiki's outbound HTTP requests: it uses the cookie jar from
+# $config{cookiejar}, and env_proxy makes it honor proxy-related
+# environment variables (http_proxy, no_proxy, etc.).
+sub useragent () {
+	return LWP::UserAgent->new(
+		cookie_jar => $config{cookiejar},
+		env_proxy => 1, # respect proxy env vars
+	);
+}
+
sub sortspec_translate ($$) {
my $spec = shift;
my $reverse = shift;
safe => 1,
rebuild => 0,
},
- cookiejar => {
- type => "string",
- example => { file => "$ENV{HOME}/.ikiwiki/cookies" },
- safe => 0, # hooks into perl module internals
- description => "cookie control",
- },
}
sub checkconfig () {
if (! defined $config{aggregateinternal}) {
$config{aggregateinternal}=1;
}
- if (! defined $config{cookiejar}) {
- $config{cookiejar}={ file => "$ENV{HOME}/.ikiwiki/cookies" };
- }
# This is done here rather than in a refresh hook because it
# needs to run before the wiki is locked.
}
$feed->{feedurl}=pop @urls;
}
- my $res=URI::Fetch->fetch($feed->{feedurl},
- UserAgent => LWP::UserAgent->new(
- cookie_jar => $config{cookiejar},
- ),
- );
+ my $ua=useragent();
+ my $res=URI::Fetch->fetch($feed->{feedurl}, UserAgent=>$ua);
if (! $res) {
$feed->{message}=URI::Fetch->errstr;
$feed->{error}=1;
feed => $feed,
copyright => $f->copyright,
title => defined $entry->title ? decode_entities($entry->title) : "untitled",
+ author => defined $entry->author ? decode_entities($entry->author) : "",
link => $entry->link,
content => (defined $c && defined $c->body) ? $c->body : "",
guid => defined $entry->id ? $entry->id : time."_".$feed->{name},
}
$template->param(title => $params{title})
if defined $params{title} && length($params{title});
+	$template->param(author => $params{author})
+		if defined $params{author} && length($params{author})
+			&& $params{author} ne $feed->{name};
$template->param(content => wikiescape(htmlabs($params{content},
defined $params{base} ? $params{base} : $feed->{feedurl})));
$template->param(name => $feed->{name});
$tag='month-calendar-day-link';
}
$calendar.=qq{\t\t<td class="$tag $downame{$wday}">};
- if (scalar(@{$linkcache{$key}}) == 1) {
- # Only one posting on this page
- my $page = $linkcache{$key}[0];
+ $calendar.=qq{<div class='popup'>$day<div class='balloon'>};
+ # Several postings on this page
+ $calendar.=qq{<ul>};
+ foreach my $page (@{$linkcache{$key}}) {
+ $calendar.= qq{\n\t\t\t<li>};
+ my $title;
+ if (exists $pagestate{$page}{meta}{title}) {
+ $title = "$pagestate{$page}{meta}{title}";
+ }
+ else {
+ $title = pagetitle(IkiWiki::basename($page));
+ }
$calendar.=htmllink($params{page}, $params{destpage},
$page,
noimageinline => 1,
- linktext => $day,
- title => pagetitle(IkiWiki::basename($page)));
- }
- else {
- $calendar.=qq{<div class='popup'>$day<div class='balloon'>};
- # Several postings on this page
- $calendar.=qq{<ul>};
- foreach my $page (@{$linkcache{$key}}) {
- $calendar.= qq{\n\t\t\t<li>};
- my $title;
- if (exists $pagestate{$page}{meta}{title}) {
- $title = "$pagestate{$page}{meta}{title}";
- }
- else {
- $title = pagetitle(IkiWiki::basename($page));
- }
- $calendar.=htmllink($params{page}, $params{destpage},
- $page,
- noimageinline => 1,
- linktext => $title,
- title => $title);
- $calendar.= '</li>';
- }
- $calendar.=qq{\n\t\t</ul>};
- $calendar.=qq{</div></div>};
+ linktext => $title,
+ title => $title);
+ $calendar.= '</li>';
}
+ $calendar.=qq{\n\t\t</ul>};
+ $calendar.=qq{</div></div>};
$calendar.=qq{</td>\n};
}
else {
return $ret;
}
+# Fill in the enclosure-related template parameters (enclosure, type,
+# length) on a feed item template, as used for RSS/Atom podcast
+# enclosures. Takes the item template, the absolute url of the enclosed
+# media file, and the source file name (used to look up its size and
+# MIME type). Does nothing if the template has no "enclosure" variable.
+sub genenclosure {
+	my $itemtemplate=shift;
+	my $url=shift;
+	my $file=shift;
+
+	# Skip all the work when the template cannot display an enclosure.
+	return unless $itemtemplate->query(name => "enclosure");
+
+	# Index 8 of srcfile_stat's return list is the file size
+	# (stat's size field) -- NOTE(review): confirm against srcfile_stat.
+	my $size=(srcfile_stat($file))[8];
+	my $mime="unknown";
+	# File::MimeInfo is optional; fall back to "unknown" if unavailable.
+	eval q{use File::MimeInfo};
+	if (! $@) {
+		$mime = mimetype($file);
+	}
+	$itemtemplate->param(
+		enclosure => $url,
+		type => $mime,
+		length => $size,
+	);
+}
+
sub genfeed ($$$$$@) {
my $feedtype=shift;
my $feedurl=shift;
foreach my $p (@pages) {
my $u=URI->new(encode_utf8(urlto($p, "", 1)));
my $pcontent = absolute_urls(get_inline_content($p, $page), $url);
+ my $fancy_enclosure_seen = 0;
$itemtemplate->param(
title => pagetitle(basename($p)),
$itemtemplate->param(mdate_822 => date_822($pagestate{$p}{meta}{updated}));
$itemtemplate->param(mdate_3339 => date_3339($pagestate{$p}{meta}{updated}));
}
- }
- if ($itemtemplate->query(name => "enclosure")) {
- my $file=$pagesources{$p};
- my $type=pagetype($file);
- if (defined $type) {
- $itemtemplate->param(content => $pcontent);
- }
- else {
- my $size=(srcfile_stat($file))[8];
- my $mime="unknown";
- eval q{use File::MimeInfo};
- if (! $@) {
- $mime = mimetype($file);
- }
- $itemtemplate->param(
- enclosure => $u,
- type => $mime,
- length => $size,
- );
+ if (exists $pagestate{$p}{meta}{enclosure}) {
+ my $absurl = $pagestate{$p}{meta}{enclosure};
+ my $file = $pagestate{$p}{meta}{enclosurefile};
+ genenclosure($itemtemplate, $absurl, $file);
+ $fancy_enclosure_seen = 1;
}
}
- else {
- $itemtemplate->param(content => $pcontent);
+
+ my $file=$pagesources{$p};
+ unless ($fancy_enclosure_seen || defined(pagetype($file))) {
+ genenclosure($itemtemplate, $u, $file);
+ $itemtemplate->param(simplepodcast => 1);
}
+ $itemtemplate->param(content => $pcontent);
+
run_hooks(pagetemplate => sub {
shift->(page => $p, destpage => $page,
template => $itemtemplate);
feeddesc => $feeddesc,
guid => $guid,
feeddate => date_3339($lasttime),
+ feeddate_822 => date_822($lasttime),
feedurl => $feedurl,
);
run_hooks(pagetemplate => sub {
add_link($page, $value);
return "";
}
+ elsif ($key eq 'enclosure') {
+ my $link=bestlink($page, $value);
+ if (! length $link) {
+ error gettext("enclosure not found")
+ }
+ add_depends($page, $link, deptype("presence"));
+
+ $value=urlto($link, $page, 1);
+ $pagestate{$page}{meta}{enclosure}=$value;
+ $pagestate{$page}{meta}{enclosurefile}=$link;
+ # fallthrough
+ }
elsif ($key eq 'author') {
$pagestate{$page}{meta}{author}=$value;
if (exists $params{sortas}) {
$template->param(title_overridden => 1);
}
+ if (exists $pagestate{$page}{meta}{enclosure}) {
+ $template->param(enclosure => HTML::Entities::encode_entities(IkiWiki::urlabs($pagestate{$page}{meta}{enclosure}, $config{url})));
+ }
+
foreach my $field (qw{authorurl}) {
eval q{use HTML::Entities};
$template->param($field => HTML::Entities::encode_entities($pagestate{$page}{meta}{$field}))
$ua=LWPx::ParanoidAgent->new;
}
else {
- $ua=LWP::UserAgent->new;
+ $ua=useragent();
}
# Store the secret in the session.
debug(gettext("LWP not found, not pinging"));
return;
}
- $ua=LWP::UserAgent->new;
+ $ua=useragent();
}
$ua->timeout($config{pinger_timeout} || 15);
--- /dev/null
+files
+ikiwiki
+ikiwiki.substvars
+ikiwiki (3.20130712) UNRELEASED; urgency=low
+
+ * calendar: Display the popup mouseover when there is only 1 page for a
+ given day, for better UI consistency.
+ * meta: Can now be used to add an enclosure to a page, which is a fancier
+ way to do podcasting than just inlining the media files directly;
+ this way you can write a post about the podcast episode with show notes,
+ author information, etc.
+ (schmonz)
+ * aggregate: Show author in addition to feedname, if different.
+ (schmonz)
+ * Consistently configure LWP::UserAgent to allow use of http_proxy
+ and no_proxy environment variables, as well as ~/.ikiwiki/cookies
+ (schmonz)
+
+ -- Joey Hess <joeyh@debian.org> Fri, 19 Jul 2013 18:53:23 -0400
+
ikiwiki (3.20130711) unstable; urgency=low
* Deal with git behavior change in 1.7.2 and newer that broke support
> i assume that this behavior stems from times when wikilinks and [[ikiwiki/directive]]s were not distinguished by \[[ vs \[[! but by the use of whitespace in directives, so whitespace had to be avoided in wikilinks.
>
> --[[chrysn]]
+
+> having hacked around in the [[plugins/link]] plugin, i can confirm that the link texts are explicitly de-escaped, and that when no pipe is inside the link (ie links like `\[[cmd_test]]`), the string `"cmd_test"` is regarded as a link (that will subsequently be converted to a readable text) rather than as a readable text (for which a suitable link target is found automatically). --[[chrysn]]
>>>>> installed, even with the above commit, `openid` won't be able to
>>>>> traverse a proxy. --[[schmonz]]
-[[!template id=gitbranch branch=schmonz/proxies author="[[schmonz]]"]]
+[[!template id=gitbranch branch=schmonz/proxy author="[[schmonz]]"]]
->>>>> I bollixed up my git, recloned, and reapplied the diffs, so
->>>>> that commit won't exist anymore. My proxy-related changes are
->>>>> now on a branch. --[[schmonz]]
+>>>>>> I've redone this from scratch, much more simply, on a new
+>>>>>> branch. --[[schmonz]].
> However, there is a simple way to avoid both problems: Use WikiLinks
> and/or the [[img_directive|ikiwiki/directive/img]]. --[[Joey]]
+> > For some inline HTML (e.g. SVG embedded with `<object>` tags, it
+> > would be nice to have a URL directive for URL-only WikiLinks.
+> > Something like:
+> > `<object type="image/svg+xml" data="[[!url image.svg]]"></object>`.
+> > This would be a more general solution than an [[SVG-specific
+> > fix|todo/svg]]. --[[wtk]]
+
[[!tag done]]
+[[!template id=gitbranch branch=chrysn/linkmapenhancement author="[[chrysn]]"]]
+
[[ikiwiki/directive/linkmap]]s display the file name instead of the pagetitle, showing unsightly underscore escapes and underscores instead of blanks to users.
the attached [[!taglink patch]] fixes this; from its commit message:
the output will look much better (at least in my wikis) with the "[[bugs/pagetitle function does not respect meta titles]]" issue fixed.
-the patch is stored in [[the patch.pl]] as created by git-format-patch. (btw, what's the preferred way to send patches, apart from creating a git branch somewhere?)
+the patch is stored in [[the patch.pl]] as created by git-format-patch, and can
+be pulled from the abovementioned branch.
--- /dev/null
+Using the osm plugin with a simple \[[!osm]] directive does not seem to work, a "TypeError: mapProjection is null" is given. I believe this is because the client side Javascript uses the options.layers, which is always Null.
+
+[[!tag patch]]
+I have produced a patch for this issue, but beware, while it appears to fix the problem for me, I have little understanding of perl and the existing code base.
+<https://github.com/cbaines/ikiwiki/commit/4294b4c24a56c7103c48250dd9d833b42838a472>
--- /dev/null
+the preprocessing hook makes sure that no infinite loops occur by restricting the depth of nested directives to 3.
+
+this is insufficient in some situations in which sidebars are conditionally assembled from templates.
+
+given there are no limits on the number of directives per page and the number of edits a user can do in a particular time frame, i assume that raising that limit slightly won't make the DoS attacks that can be done against ikiwiki too much worse.
+
+i'd like to suggest 8 as a new value for recursion depth limit. most people can wrap their minds around a depth 3 nested directive setup, but when you reach a depth of 8, it's likely to be easier to write a dedicated plugin.
+
+<code><pre>
+diff --git a/IkiWiki.pm b/IkiWiki.pm
+index 75c9579..ad0f8b0 100644
+--- a/IkiWiki.pm
++++ b/IkiWiki.pm
+@@ -1487 +1487 @@ sub preprocess ($$$;$$) {
+- if ($preprocessing{$page}++ > 3) {
++ if ($preprocessing{$page}++ > 8) {
+</pre></code>
+
+[[!tag patch]]
the wiki. This can be used to create a Planet type site that aggregates
interesting feeds.
-You can also mix blogging with podcasting by dropping audio files where
-they will be picked up like blog posts. This will work for any files that
-you would care to syndicate.
+You can also mix blogging with [[podcasting|podcast]]. Simply drop
+media files where they will be picked up like blog posts. For
+fuller-featured podcast feeds, enclose media files in blog posts
+using [[plugins/meta]]. Either way, this will work for any files
+that you would care to syndicate.
## Valid html and [[css]]
--- /dev/null
+[[!comment format=mdwn
+ username="http://smcv.pseudorandom.co.uk/"
+ nickname="smcv"
+ subject="It's a wiki: any editor can have as many blogs as they want"
+ date="2013-07-17T08:17:05Z"
+ content="""
+Ikiwiki is a wiki, so you can certainly have multiple users. Any user
+with appropriate access can create any number of blogs: they just need
+to put an [[ikiwiki/directive/inline]] directive on any page they can
+edit, with a [[ikiwiki/PageSpec]] pointing to pages (blog posts) in a
+directory where they can create pages.
+
+If you want a limited set of users to be able to edit the wiki without
+making them full wiki admins, you can use [[plugins/lockedit]]:
+
+ locked_pages: * and !(user(bob) or user(chris))
+
+or if you want \"most\" users to only be able to write on their own blog, and
+not on other users' blogs (for instance: Alice the wiki admin can edit
+anything, but Bob can only edit `users/bob/...` and Chris can only edit
+`users/chris/...`) then you can use [[plugins/lockedit]], something like:
+
+ locked_pages: * and !(user(bob) and (users/bob or users/bob/*)) and !(user(chris) and (users/chris or users/chris/*))
+
+(Wiki admins can always edit locked pages.)
+
+If you have lots of users and you know a bit of Perl, you might want
+to [[write a plugin|plugins/write]] that adds a function-like
+[[ikiwiki/PageSpec]] like `owndirectory(users)`, which would match if
+there is a logged-in user and the page being edited is equal to or
+a subpage of their directory in users.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="http://smcv.pseudorandom.co.uk/"
+ nickname="smcv"
+ subject="comment 2"
+ date="2013-07-17T08:23:32Z"
+ content="""
+IkiWiki's own [[bugs]], [[news]] and [[to-do list|todo]] are functionally
+equivalent to blogs, in fact. ([[news]] is the most obviously blog-like,
+[[bugs]] is like a blog except that solved bugs disappear from the
+page/feed, and [[todo]] only shows titles, not content, but is otherwise
+like [[bugs]].)
+"""]]
--- /dev/null
+I entered
+
+ <small>this is my license</small>
+
+into `license.mdwn`, and it shows up as
+
+ License:
+ this is my license
+
+
+I'd like to remove the line break, ideally also the redundant `License:`. How to do that?
--- /dev/null
+[[!comment format=mdwn
+ username="https://launchpad.net/~ojwb"
+ nickname="ojwb"
+ subject="comment 6"
+ date="2013-08-26T03:20:05Z"
+ content="""
+As a Xapian database is updated, changes are written out straight away to most of the tables, but the postlist changes are batched up in memory and only written to disk just before they are committed (or to free up memory during a large transaction). So the empty postlist table you're seeing means that some documents were indexed, but the indexer stopped running before anything was committed. By default, there's an auto-commit every 10000 documents added, removed, or changed, so it presumably managed to process less than 10000 documents.
+
+The issue with 1.2.5 RPMs may be down to there being two versions of the Xapian perl bindings, both of which claim to be Search::Xapian and both of which have been packaged up as RPMs. For Xapian 1.2.x, you probably want to use the XS bindings (perl-Search-Xapian) not those generated with SWIG (xapian-bindings-perl). The SWIG-generated ones are aimed to replace the XS ones, but in 1.2.x they're not really ready for prime time. If your perl bindings are described with a 4 component version (e.g. 1.2.15.0) then you should be good; if it's 3 components (e.g. 1.2.15) then you probably want the other ones.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="https://www.google.com/accounts/o8/id?id=AItOawmKyeW2G4jjSdnL1m6kPPtAiGFUBsnYCfY"
+ nickname="FName"
+ subject="Am i the only one using MacPorts and Ikiwiki together?"
+ date="2013-08-10T23:45:40Z"
+ content="""
+This exact same problem has been stopping me from using Ikiwiki locally. I'm still able to push to remote Ikiwiki instance. But I'd like to push it to local repo and have the website built locally first so I can confirm everything runs fine. Every time I try to build it locally, i have this exact same problem.
+"""]]
--- /dev/null
+[[!comment format=mdwn
+ username="schmonz"
+ ip="198.228.228.165"
+ subject="MacPorts still hasn't updated past 2011 ikiwiki with a seriously old Perl"
+ date="2013-08-11T01:40:23Z"
+ content="""
+According to [this page](http://www.macports.org/ports.php?by=name&substr=Ikiwiki) nothing has changed in MacPorts' ikiwiki build. Have you tried contacting the package maintainer or, failing that, submitting an update request to the relevant mailing list?
+
+If MacPorts is this behind the times, it doesn't make sense to me to recommend it to anyone. But I'm biased in favor of pkgsrc, so I don't feel comfortable editing the [[tips/ikiwiki on Mac OS X]] page to promote the thing I happen to prefer... but pkgsrc has Perl 5.18, and I keep pkgsrc's ikiwiki package updated at every release. --[[schmonz]]
+"""]]
--- /dev/null
+There has been a meeting of people interested in ikiwiki
+[[during debconf13|http://penta.debconf.org/dc13_schedule/events/1060.en.html]]
+on 2013-08-11. Videos of the event are linked there for download, or
+[[can be viewed online|http://www.irill.org/videos/debconf13/ikiwiki_BoF.webm]].
+
+Summary
+=======
+
+Ikiwiki's state and development
+-------------------------------
+
+Ikiwiki has reached a stable state with a working ecosystem, with the majority
+of changes being minor adaptations and bugfixes these days.
+
+It is unlikely that there will be a major overhaul any time soon.
+
+If incompatible changes are to be made, that might warrant a 4.$DATE
+transition, especially given that the [[ikiwiki-transition]] mechanism has not
+been used for some time. Potential changes for such a transition will be
+discussed (see below).
+
+Names of pages and links
+------------------------
+
+Several of [[the issues chrysn deals with|users/chrysn]] revolve about the
+differences between a page's name, its title, the name of source and destination
+page, how they are converted, and which is used when.
+
+chrysn has started to draft a page on [[plugins/write/names]], and
+would appreciate review and comments.
+
+Themability
+-----------
+
+The default theme of ikiwiki is more appealing to the people who are expected
+to run an ikiwiki setup; end users with Web 2.$current_minor_version
+expectations often don't have their tastes served well.
+
+[[Recently|version 3.20130518]], templates have become more theming friendly,
+but for the general case still require the theme to be known to mainline
+ikiwiki, lest generic templates diverge. A planned feature is generalized
+sidebars, where more places inside the template can be filled using the same
+mechanism as currently used in the [[plugins/sidebar]] plugin, but changes there
+require a complete rebuild. (Not worse than the rebuilds required when changing
+the main templates, but it would be more tempting to frequently change them.)
+
+Examples of fancy ikiwiki themes have been brought up:
+
+ * https://www.gresille.org/
+ * https://nos-oignons.net/
+ * http://www.rezine.org/accueil/association/
+ * https://cas-libres.poivron.org/
+
+A generalized version of the [[bootstrap|http://twitter.github.com/bootstrap/]]
+[[theme|theme market]] would be appreciated, as the current one is targeted
+towards a particular installation.
+
+Performance
+-----------
+
+Rebuilding many pages takes considerable time, especially when sidebars are
+changed.
+
+A faster way to use the page index (eg. sqlite) would help speed up the
+usual rebuilds, but would not help speed up massive rebuilds.
+
+RDF backend
+-----------
+
+On the priority level "crazy ideas", it was discussed to augment or finally
+change the index to an RDF triple collection. Statements would be extracted
+from the source pages in the scan hook, and form a triple store. Pagespecs
+would be resolved to SPARQL queries against that database; also the
+[[todo/structured page data]] fields could be addressed with this.
+
+Optimizations are still possible, even more generally, for example with
+dependencies on other pages' title:
+
+* page A sets its own title with `\[[!meta title="the A page"]]`, which results
+ in the statement '`<./page_A> dc:title "the A page" .`'.
+
+* page B uses some kind of auto-titling link to page A: `\[[~|page A]]`, which
+ queries for '`<./page_A> dc:title ?a`'.
+
+* When page B is built, it is stored that it depends on statements involving
+ the term `<./page_A>`, and the current hash value of all statements about
+ that term. (Those will be computed for all observed statements at scan time.
+ Pages that use more complex queries might not be able to list all their
+ dependencies.)
+
+  Also, the queries and query results executed while building page B are
+  stored in a separate cache.
+
+* When some other page starts linking to page A, the first cache is
+ invalidated, as now there are more statements on the subject of
+ '`<./page_A>`', so page B might need to be rebuilt. Before that is done, its
+ cached queries are executed. If their results did not change, page B does not
+ need any further action.
+
+vCard support
+-------------
+
+The topic of combining ikiwiki with
+[[calypso|http://keithp.com/blogs/calypso/]] was brought up
+[[in another event|http://penta.debconf.org/dc13_schedule/events/1087.en.html]]
+during the same DebConf.
+
+For further details, see [[todo/vCard rendering]].
+++ /dev/null
-# ACodispo
-
The gitweb is [here](http://source.ikiwiki.branchable.com/?p=source.git;a=summary).
Commits to this git repository are fed into [KGB](http://kgb.alioth.debian.org/)
-for transmission to the #ikiwiki irc channel.
+for transmission to the #ikiwiki irc channel. See also: [[spam_fighting]]
## personal git repositories
* [[ikiplugins|DavidBremner]] `git://pivot.cs.unb.ca/git/ikiplugins.git`
* [[jonas|JonasSmedegaard]] `git://source.jones.dk/ikiwiki-upstream`
* [[arpitjain]] `git://github.com/arpitjain11/ikiwiki.git`
-* [[chrysn]] `git://github.com/github076986099/ikiwiki.git`
+* [[chrysn]] `git://prometheus.amsuess.com/ikiwiki`
* [[simonraven]] `git://github.com/kjikaqawej/ikiwiki-simon.git`
* [[schmonz]] `git://github.com/schmonz/ikiwiki.git`
* [[will]] `http://www.cse.unsw.edu.au/~willu/ikiwiki.git`
[[SubPage]] of "blog") will be part of the blog, and the newest 10
of them will appear in the page. Note that if files that are not pages
match the [[PageSpec]], they will be included in the feed using RSS
-enclosures, which is useful for podcasting.
+enclosures, which is useful for simple [[!iki podcast desc=podcasting]];
+for fuller-featured podcast feeds, enclose media files in blog posts
+using [[meta]].
The optional `rootpage` parameter tells the wiki that new posts to this
blog should default to being [[SubPages|SubPage]] of "blog", and enables a
[[!iki plugins/htmlscrubber desc=htmlscrubber]] plugin is enabled, since it can be used to
insert unsafe content.
+* enclosure
+
+ Specifies a link to a file to be rendered as an "enclosure" in
+ RSS/Atom feeds (and a plain old link in HTML). Useful for
+ [[!iki podcast desc=podcasting]].
+
* redir
Causes the page to redirect to another page in the wiki.
suitable for publishing on a website. Ikiwiki stores pages and history in a
[[revision_control_system|rcs]] such as [[Subversion|rcs/svn]] or [[rcs/Git]].
There are many other [[features]], including support for
-[[blogging|blog]], as well as a large array of [[plugins]].
+[[blogging|blog]] and [[podcasting|podcast]], as well as a large
+array of [[plugins]].
[[!template id=links]]
+++ /dev/null
-ikiwiki 3.20121212 released with [[!toggle text="these changes"]]
-[[!toggleable text="""
- * filecheck: Fix bug that prevented File::MimeInfo::Magic from ever
- being used.
- * openid: Display openid in Preferences page as a comment, so it can be
- selected in all browsers."""]]
\ No newline at end of file
--- /dev/null
+ikiwiki 3.20130711 released with [[!toggle text="these changes"]]
+[[!toggleable text="""
+ * Deal with git behavior change in 1.7.2 and newer that broke support
+ for commits with an empty commit message.
+ * Pass --no-edit when used with git 1.7.8 and newer."""]]
\ No newline at end of file
>> So now after two days debugging and tracing, I just fixed that and it works. Well, at least a learnt
>> a lot about ikiwiki internal ;-)
>> Thanks for all the work you did on the plugin! --Lukas
+
+----
+Hi smcv, we spoke on irc the other day. Passed `show => "0"` on line 126 in album.pm to remove the limit on the thumbnails shown on the album page. Setting it on the album directive didn't work. As mentioned above by Jaime setting the thumbnailsize doesn't catch either. Or rather if I git push after changing the album directive the generated thumbnails (the image files) are the correct size as set in the directive. The html however uses the default thumbnailsize as hardcoded in album.pm and has broken thumbnails as it links to a file with the default size in the filename.
+
+Issuing `ikiwiki --rebuild` knocks the system into another gear where the thumbnails show up correctly but this is only due to the html being the same as above (linking to hardcoded thumbnailsize) but the generated thumbnail images are now matching the hardcoded size ignoring the thumbnailsize attribute on the album directive.
+
+For me this behaviour is way beyond my skills to sort out (I'm no coder). The albumplugin ikiwiki combo is very attractive to me and the plugin is so close to working!
+
+I've changed the behavior of the "slideshow" to show the next image when clicking the large image as downloading a full resolution image is a rare use case in a gallery of this type imho. The large clicktarget means you are likely to unnecessarily download large files otherwise. I can't quite follow the template, album.pm flow so I can't figure out how to put a "download full resolution" link on the viewer page which would be my next step. To achieve the next link i added ` link => ($nextpage or $album),` around line 454 in `my $img`
+
+My wishlist for the plugin would include:
+
+- Reading exif info from the imagefile
+- Keeping the full resolution image files out of version control
+- Being able to create new albums by tag or bym anually picking images from other albums. Could be a simple comma separated list of viewer names, or even full urls, in the album directive.
+
+--kjs
+
+----
+
+++ /dev/null
-[[!template id=plugin name=dynamiccookies author="[[schmonz]]"]]
-[[!template id=gitbranch branch=schmonz/dynamiccookies author="[[schmonz]]"]]
-[[!tag type/web]]
-
-This plugin populates ikiwiki's cookiejar by calling an external
-program. The program is expected to print the serialized cookies
-on `stdout` in a form which can be `eval`'d (e.g., `Data::Dumper`).
-
-The plugin author's use case for this seemingly hacky interface:
-aggregating authenticated feeds at work, where for various reasons
-the needed cookies must be acquired using a separate `perl` from
-the one used by ikiwiki.
This is an expanded and updated version of [[Jason Blevin|users/jasonblevins]]'s pandoc plugin. Get it and see further details at <https://github.com/dubiousjim/pandoc-iki>.
+A version, merging enhancements in various forks is available at <https://github.com/sciunto/ikiwiki-pandoc>. PR are welcome.
+++ /dev/null
-[[!template id=plugin name=proxies author="[[schmonz]]"]]
-[[!template id=gitbranch branch=schmonz/proxies author="[[schmonz]]"]]
-[[!tag type/web]]
-
-This plugin enables ikiwiki to open outbound connections (such as
-found in [[plugins/aggregate]], [[plugins/openid]], and [[plugins/pinger]])
-via a proxy. The proxy can be configurably avoided for connections
-to certain domains.
-
-### To do
-
-* Move duplicated user-agent setup out of other plugins into this one.
-* While I'm at it, fix [[bugs/http_proxy_for_openid]].
+++ /dev/null
-[[!template id=plugin name=wc author="[[schmonz]]"]]
-[[!template id=gitbranch branch=schmonz/wc author="[[schmonz]]"]]
-[[!tag type/meta]]
-[[!tag patch]]
-
-This plugin counts words in a page. For a single page, write a
-`\[[!wc]]` directive and the word count will be interpolated there.
-For a site, add `<TMPL_VAR WORDCOUNT>` to your [[templates]].
-
-If [[!cpan HTML::Strip]] is installed, the wordcount will be slightly
-more accurate.
-
-Possible enhancements:
-
-* Optimize: count words iff the result will be displayed. `sanitize()`
- seems like the right place to count. Since it's called well after
- `preprocess()`, I can tell whether a directive needs the result,
- but since it appears to be called before `pagetemplate()`, I can't
- tell whether a template wants to know and possibly skip the
- computation. (In other words, if I add `$needed_for_template`
- like `$needed_for_directive`, it gets set too late for `sanitize()`
- to see.)
--- /dev/null
+[[!template id=plugin name=wordcount author="[[schmonz]]"]]
+[[!template id=gitbranch branch=schmonz/wordcount author="[[schmonz]]"]]
+[[!tag type/meta]]
+[[!tag patch]]
+
+This plugin counts words in a page. For a single page, write a
+`\[[!wordcount]]` directive and the word count will be interpolated there.
+For a site, add `<TMPL_VAR WORDCOUNT>` to your [[templates]].
+
+If [[!cpan HTML::Strip]] is installed, the wordcount will be slightly
+more accurate.
+
+Possible enhancements:
+
+* Optimize: count words iff the result will be displayed. `sanitize()`
+ seems like the right place to count. Since it's called well after
+ `preprocess()`, I can tell whether a directive needs the result,
+ but since it appears to be called before `pagetemplate()`, I can't
+ tell whether a template wants to know and possibly skip the
+ computation. (In other words, if I add `$needed_for_template`
+ like `$needed_for_directive`, it gets set too late for `sanitize()`
+ to see.)
Thanks for prompt reply.All working. I will post on my site tonight and link here what I did on CentOS to make this work. --[Mick](http://www.lunix.com.au)
Any hint on how to highlight actual mdwn or any other supported markup code? -- [wiebel](http://wiebels.info)
+
+Hi, I would like put code with higlight plugin (\[[!format php """ code code code """]]) between 2 sublists. Something like that:
+
+* level 1
+ * level 2
+[[!format php """
+<?php
+echo "coucou";
+?>
+"""]]
+
+ * level 2
+
+You can see that the second level 2 is not correctly translate. It is translate in level 1 :-(. What is the good syntax ? -- Julien
--- /dev/null
+*This page is work in progress and can not be considered to be part of the
+documentation so far.*
+
+This page describes the various forms the name of a page can take.
+
+[[!table data="""
+concept | example
+-----------------|-----------------------------------------------------------------------------
+title | '`dir A/page B`' unless a meta title is defined
+link | '`dir_A/page_B`' or '`dir A/page B`' or '`page_B`' / '`page B`' when inside dirA
+page name | '`dir_A/page_B`'
+source file | '`/dir_A/page_B/index.mdwn`' or '`/dir_A/page_B.mdwn`' depending on indexpages
+rendered file | '`/dir_A/page_B/`' or '`/dir_A/page_B.html`' depending on usedirs
+"""]]
+
+
+functions:
+
+* `pagename`: source file to page name
+* `pagetitle`: page name to title but not considering meta title
+* `titlepage`: title to page name but not considering meta title
+* `linkpage`: link to page name
+* `htmllink`: page name to html link to rendered file
+* `bestlink`: link to page name
+* `htmlpage`: page name to rendered file
--- /dev/null
+A podcast is just a [[blog]] with RSS/Atom feeds containing media
+enclosures. Once you've made a blog, ikiwiki gives you two ways to
+turn it into a podcast.
+
+## Basic podcasting
+
+Put a media file where you'd put a blog post, and your `\[[!inline]]`
+directive will include it as a media-only post.
+
+## Fancier podcasting
+
+Put the media file where `\[[!inline]]` _won't_ directly include it.
+Write a blog post with `\[[!meta enclosure="WikiLink/to/media.mp3"]]`
+and your show notes.
+
+-----
+
+See also [[tips/migrating_podcast_to_ikiwiki]].
This is the [[SandBox]], a page anyone can edit to try out ikiwiki
(version [[!version ]]).
-hello world
+hello world (right back at ya)
> This is a blockquote.
>
--- /dev/null
+ikiwiki.info is occasionally attacked by persistent spammers who keep
+making changes. Anyone can revert spam changes, and several people do.
+
+If you find a spammer, and revert their spam, please post a link to the
+spam commit here. After review, these can be used to ban spammers by login,
+or for the really persistent ones, by IP (or even IP range).
+
+If you're a trusted member of the ikiwiki community and would like access
+to the setup branch in git to be able to update the ban list, get in touch
+with joey with a ssh public key. Once your key is added, you will be able
+to edit the ikiwiki.setup file on the setup branch, and push changes.
+You will also be able to access the recent web server logs to find IP
+addresses, by running: `ssh b-ikiwiki@ikiwiki.info logdump > access.log`
+
+## Spam commits
+
+[[!table data="""
+Commit | Date | Login | IP
+854588d | 2013/08/15 | webschen1 | 79.228.6.28
+d8f1faa | 2013/08/16 | webschen1 | 79.228.11.79
+d1dbd8e | 2013/08/19 | webschen1 | 79.228.8.176
+6602052 | 2013/08/21 | webschen1 | 31.17.11.19
+"""]]
+
+[[schmonz]] attempted to ban `webschen1` and `ip(79.228.*.*)` and got this:
+
+ remote: you are not allowed to change refs/heads/setup
+ To git://git.ikiwiki.info/
+ ! [remote rejected] setup -> setup (pre-receive hook declined)
+ error: failed to push some refs to 'git://git.ikiwiki.info/'
border-bottom: 1px solid #000;
}
-.inlinecontent {
+.inlinecontent,
+.inlineenclosure {
margin-top: .4em;
}
<br clear="both" />
-[[!img monochrome_small.png align==left]] The **monochrome** theme,
+[[!img monochrome_small.png align=left]] The **monochrome** theme,
based on [[Jon]]'s homepage design.
<br clear="both" />
--- /dev/null
+[[!meta title="Hosting Ikiwiki with a master git repository on a remote machine"]]
+
+This tutorial explains how to set up a wiki such that:
+
+- the machine running Ikiwiki is not the same as the one hosting the git repository;
+- changes can be done using CGI;
+- changes can be done using git (using ssh protocol).
+
+This configuration may be useful when:
+
+- you do not want to (or cannot) connect to the machine hosting your wiki using
+  `git` or `ssh`;
+- you do not want to (or cannot) publish web content on the machine hosting your
+  remotely accessible git repository.
+
+I assume the [[rcs]] used is [[rcs/git]], but it might be done for other rcs.
+
+# Similar and related tips and problems
+
+- [[http://www.icanttype.org/blog/ikiwiki_git_remote_repo/]] Similar to what I
+  am describing, except that you must be able to connect to the machine
+  hosting Ikiwiki using ssh.
+- [[forum/How_to_specify_repository_is_on_a_remote_host__63__]] My solution
+  solves the problem described here, except that svn is used there, and in
+  the comments, Joey advises not to do this with svn.
+- [[forum/how_to_setup_ikiwiki_on_a_remote_host]] My solution might answer this
+ problem.
+
+# Overview
+
+By default, when creating a wiki, Ikiwiki creates and uses two repositories: a
+bare repository, and a « slave » repository, used as the source to render the
+wiki. All of these are on the same machine.
+
+Instead of having the bare repository hosted on the same machine, we will host
+it on a remote machine, and tell Ikiwiki to use it instead of its local one. We
+will also ensure that the wiki is rendered whenever a commit is done to the git
+repository.
+
+# Conventions
+
+- We are building a wiki called *SITE*.
+- The machine running Ikiwiki and a web server is called the *Ikiwiki machine*.
+- The machine hosting the git repository is called the *git machine*. Users can
+ make git pull and push to this machine.
+
+# Let's go!
+
+## Creating ssh keys on the Ikiwiki machine
+
+- Create a pair of ssh keys, not password-protected (as they will be used by
+ script). Let's call them `id_SITE` and `id_SITE.pub`. These keys will be used
+ by the ikiwiki machine to connect to the git machine.
+
+## Creating and setting up a repository on the git machine
+
+- Create a repository `SITE.git` on the git machine (using `git init --bare`),
+ and ensure that public key `id_SITE.pub` can pull from and push to this
+ repository (using `~/.ssh/config` or by setting the right permissions on
+ gitolite or gitosis).
+
+## Creating the wiki on the ikiwiki machine
+
+- Create the wiki following [[the regular procedure|setup]]. You should have,
+ among others, a directory `SITE.git`, being the master git repository, and a
+ directory `SITE`, clone of `SITE.git`, used as source directory to render the
+ wiki.
+- Ensure that your web server can serve the rendered wiki, and that changes can
+ be done with CGI.
+
+## Configuring the wiki on the wiki machine so that it uses the repository of the git machine
+
+- Configure ssh so that it uses the ssh key `id_SITE` to connect to the git
+  machine: add the following lines to file `~/.ssh/config` on the ikiwiki
+  machine:
+
+ Host server.name.of.the.git.machine
+ User git-machine-user
+ IdentityFile ~/.ssh/id_SITE
+
+- Configure the local copy `SITE` of the wiki (on the ikiwiki machine) to use
+ the remote git repository instead of the local `SITE.git`. To do so, in the
+ file `SITE/.git/config`, replace the lines:
+
+ [remote "origin"]
+ url = /path/to/SITE.git
+ fetch = +refs/heads/*:refs/remotes/origin/*
+
+ by the lines:
+
+ [remote "origin"]
+ url = git-machine-user@server.name.of.the.git.machine:SITE.git
+ fetch = +refs/heads/*:refs/remotes/origin/*
+
+- In this repository (`SITE`), run `git pull` and `git push` to ensure that
+  everything works fine. It *works fine* when you can run `git pull` and
+  `git push` without user interaction.
+
+- Disable the `post-update` hook in ikiwiki: it is used if the git commits are
+ done on this machine, which is no longer the case. To do so, in file
+ `SITE.setup`, comment the line:
+
+ git_wrapper => '/path/to/SITE.git/hooks/post-update',
+
+- Tell Ikiwiki to push to the ikiwiki machine when a commit is done by the web
+ (CGI). To do so, in file `SITE.setup`, add the line:
+
+ git_wrapper_background_command => 'git push',
+
+- Enable plugin [[pingee|http://ikiwiki.info/plugins/pingee/]]. It allows git
+ (on the git machine) to tell ikiwiki to update and rebuild the wiki when
+ commits are done on the git repository, using only an http connection. To do
+ so, add `pingee` to the list of enabled plugins (variable `add_plugins` in
+ file `SITE.setup`).
+
+- Rebuild the wiki (since you changed the setup file `SITE.setup`).
+
+ ikiwiki --setup SITE.setup --rebuild --verbose
+
+## Configure the git repository (on the git machine) to update the wiki after a push
+
+- Add in the `post-receive` hook (file `SITE.git/hooks/post-receive`):
+
+ wget "http://WIKI-URL/ikiwiki.cgi?do=ping" -O /dev/stdout
+
+ If your wiki is password protected, use:
+
+ wget "http://LOGIN:PASSWORD@WIKI-URL/ikiwiki.cgi?do=ping" -O /dev/stdout
+
+
+## Going further
+
+- *Web server on a third machine* It should be possible to use a third machine
+ to host the web server. A hook might be used to export the rendered wiki on
+ this server, or use a nfs repository as the destination repository of
+ ikiwiki. However, allowing web modifications (using CGI) might be tricky…
+- *Using [[gitolite|https://github.com/sitaramc/gitolite]] to manage
+ repositories on the git machine* Simply replace the manipulations of git on
+ the git machine by the corresponding manipulations using gitolite.
--- /dev/null
+First, get familiar with [[howto avoid flooding aggregators]].
+
+## Into ikiwiki, from another CMS
+
+7. Copy content into ikiwiki, taking care to:
+ 7. Match article paths to preserve old permalinks.
+ 7. Match enclosure paths (or use redirects) to preserve old URLs.
+ 7. Match titles, post dates, and guids with `\[[!meta]]`.
+ 7. Match feed paths with permanent redirects from old Atom
+ location to `index.atom` (likewise for RSS).
+ 7. `\[[!inline]]` the articles.
+7. Rerun `ikiwiki --setup`.
+
+## Within ikiwiki, from basic to fancy
+
+7. Exclude media files from `\[[!inline]]`.
+7. Replace direct `\[[WikiLinks]]` to media files with
+ `\[[!meta enclosure="WikiLink/to/media.mp3"]]`.
+7. Rerun `ikiwiki --setup`.
-ikiwiki's simple podcasting, while elegant and minimal, doesn't (as
-mentioned in [[todo/blogging]]) produce full-featured feeds. In
-fancy podcasts, episodes are accompanied by text content. The feeds
-also have lots more metadata.
-
-[[!toc]]
-
## Status
[[!template id=gitbranch branch=schmonz/fancypodcast author="[[schmonz]]"]]
[[!tag patch]]
-In summary, the branch preserves ikiwiki's existing podcast behavior,
-adds more featureful behavior, and has been tested to work well in
-some common podcatchers. I believe it is ready for review and
-possible integration, and I'd like to get feedback to that effect
-(or to the contrary) before making further enhancements. I know
-[[joey]]'s the final arbiter here, but I'd appreciate any qualified,
-critical eyes ([[smcv]]?) raking over my diffs. --[[schmonz]]
+Nothing new since 2013/07/21 [[merge|done]] to `master`.
## Features
Episode enclosure |(./) |(./) |(./) |(./)
"""]]
-## Design
-
-7. For each fancy podcast episode, write a blog post containing
- `\[[!meta enclosure="WikiLink/to/media.mp3"]]`. (Don't specify
- more than one enclosure -- but if you do, last one wins.)
-7. When rendering to HTML (single-page or inlined), append a link
- to the media file.
-7. When rendering to RSS/Atom, the text is the entry's content and
- the media file is its enclosure.
-7. Don't break simple podcasts in pursuit of fancy podcasts.
-
-## Implementation
-
-### Completed
-
-* Cover the existing simple podcast behavior with tests.
-* Add an `enclosure` field to [[plugins/meta]] that expands the
- given [[ikiwiki/WikiLink]] to an absolute URL (feed enclosures
- pretty much need to be, and the reference feeds I've looked at
- all do this).
-* Write failing tests for the desired single-page and inlined
- HTML behavior, then make them pass by adding enclosure stanzas
- to `{,inline}page.tmpl`.
-* Write failing tests for the desired RSS/Atom behavior, then make
- them pass via changes to `{atom,rss}item.tmpl` and [[plugins/inline]].
-* Match feature-for-feature with
- [tru_podcast](http://www.rainskit.com/blog/542/tru_podcast-a-podcasting-plugin-for-textpattern)
- (what [[schmonz]] will be migrating from).
-* Enrich [feed metadata](http://cyber.law.harvard.edu/rss/rss.html)
- by catching up `rsspage.tmpl` to `atompage.tmpl`.
-* Verify that [[plugins/more]] plays well with fancy podcasts.
-* Verify that the feeds validate.
-* Subscribe to a fancy feed in some common podcatchers and verify
- display details against a reference podcast.
-* Verify smooth transitions for two common use cases (see testing
- details below).
-
-### Must-have (for [[schmonz]], anyway)
-
-* Think carefully about UTF-8.
-* Verify that _all_ the tests pass (not just my new ones).
-
-## Migration
-
-### Upgrading within ikiwiki: from simple to fancy
-
-#### My test podcast
-
-For this test, I chose a podcast that tries to work around ikiwiki's
-current limitations by issuing two separate `inline`s:
-
-* One with `feedonly=yes` that includes `.mdwn`, `.pdf`, and `.mp3`
-* One with `feeds=no` that includes only `.mdwn` (and makes a trail)
-
-This has the following effects:
-
-* Browser: sees just the articles (each of which has a manually
- created link to its corresponding media file)
-* Feedreader: sees all the articles and media in one flat stream
-* Podcatcher: sees just the media (sans articles)
-
-I want instead to write one `inline` with these effects:
-
-* Browser: sees just the articles (each of which automatically links
- to its enclosure)
-* Feedreader: sees just the articles (each of which specifies its
- enclosure)
-* Podcatcher: sees just the enclosures (each of which has an enclosing
- article, rendered as the media's "description")
-
-#### Upgrade steps
-
-7. Set up a non-production copy of the podcast.
- 7. Visually diff RSS and Atom feeds against production.
- 7. Subscribe to the copy (both feeds) in `r2e`, iTunes, Downcast.
-7. Apply fancypodcast patch to the installed ikiwiki:
- 7. `cd ~/Documents/trees/ikiwiki && git checkout fancypodcast`
- 7. `git diff --no-prefix master > ~/Documents/trees/localpatches/www/ikiwiki/fancypodcast.diff`
- 7. `cd ~/Documents/trees/pkgsrc-current/www/ikiwiki && make deinstall && make install clean`
-7. Verify that simple podcasts are unaffected:
- 7. Rerun `ikiwiki --setup`.
- 7. `diff -uB simple-before.rss simple-after.rss`
- * A few new elements and attributes, as expected.
- 7. `diff -uB simple-before.atom simple-after.atom`
- * No change.
-7. Remove the feed-only `inline` and enable feeds on the remaining one.
-7. Convert articles' manual download links to `\[[!meta enclosure=""]]`.
-7. I want existing and future podcatchers to get my new fancy
- episodes, and I know my podcast isn't in any planets, so I'm
- going to skip [[tips/howto avoid flooding aggregators]].
-7. Rerun `ikiwiki --setup`.
-7. Verify browser shows the same stuff.
-7. `diff -uB simple-after.rss fancy-after.rss # and atom`
- * MP3s and PDFs are no longer naked enclosures, but belong to
- articles as they should.
- * Articles have updated modification times, as they should.
-7. `r2e run` (both RSS and Atom)
- * Nothing new with the default `trust-guid = True` (otherwise
- would expect updated articles).
-7. iTunes "Update Podcast" (both RSS and Atom)
- * Added one episode per article, with article text as the episode
- description.
- * Kept old naked-enclosure episodes around.
-7. Downcast refresh (RSS):
- * Added one episode per article, with article text as the episode
- description.
- * Kept old naked-enclosure episodes around.
-7. Downcast refresh (Atom):
- * Added one episode per article, with no episode description
- (expected, see feature table).
- * Kept old naked-enclosure episodes around.
-
-Different tradeoffs are possible. These seem okay to me.
-
-### Importing into ikiwiki: fancy (from another CMS)
-
-#### My test podcast
-
-For this test, I chose a podcast currently being published with
-Textpattern and tru_podcast, because I'd strongly prefer to publish
-it with ikiwiki instead.
-
-#### Upgrade steps
-
-7. Set up a non-production copy of the podcast.
- 7. Visually diff RSS and Atom feeds against production.
- 7. Subscribe to the copy (both feeds) in `r2e`, iTunes, Downcast.
-7. With a fancypodcast-enabled ikiwiki installed:
- 7. Copy content from Textpattern to ikiwiki:
- 7. Match article paths to preserve `/YYYY/MM/DD/post-title` permalinks.
- 7. Match enclosure paths (or redirect) to preserve Textpattern's URLs.
- 7. Match titles, post dates, and guids with `\[[!meta]]`.
- 7. Match feed paths with permanent redirects from `/atom/` to
- `/index.atom` (and same for RSS).
- 7. `\[[!inline]]` the articles.
- 7. Rerun `ikiwiki --setup`.
-7. Stop Textpattern, start ikiwiki.
-7. Verify that podcatchers see the feeds and don't redownload anything.
-7. Naively add two new blog posts, one with an enclosure.
-7. Verify that podcatchers download the new enclosures.
-
------
-
## Future improvements
### iTunes fancy podcasting
* Don't render template text (e.g., "Use this template to insert a
note into a page") in feeds.
+ * [[schmonz]] observes this in a dual-feed (one for enclosures,
+ another for browsers) scenario, but not in the typical case.
+ Remaining to be verified: that once migrated to fancypodcast,
+ this stops being a problem.
* Optionally specify the enclosure's:
* MIME type, in case `File::MimeInfo` guesses wrong.
* Duration, in case `ffprobe` guesses wrong.
* Configurably generate additional subscription links (such as
iTunes) alongside the RSS/Atom ones in [[plugins/inline]].
* Support Apple's "enhanced podcasts" (if they're still relevant).
-
-### code review
-
- + # XXX better way to compute relative to srcdir?
- + my $file = $absurl;
- + $file =~ s|^$config{url}/||;
-
-I don't think ikiwiki offers a better way to do that, because there is
-normally no reason to do that. Why does it need an url of this form here?
---[[Joey]]
-
-> In all the popular, production-quality podcast feeds I've looked
-> at, enclosure URLs are always absolute (even when they could be
-> expressed concisely as relative). [Apple's
-> example](http://www.apple.com/itunes/podcasts/specs.html#example)
-> does too. So I told \[[!meta]] to call `urlto()` with the third
-> parameter true, which means the \[[!inline]] code here gets an
-> absolute URL in `$pagestate{$p}{meta}{enclosure}`. To compute the
-> enclosure's metadata, though, we of course need it as a local path.
-> I didn't see a less
-> [ongepotchket](http://www.jewish-languages.org/jewish-english-lexicon/words/1402)
-> way at the time. If you have a better idea, I'm happy to hear it;
-> if not, I'll add an explanatory comment. --[[schmonz]]
-
->> I would be more comfortable with this if two two different forms of url
->> you need were both generated by calling urlto. It'd be fine to call
->> it more than once. --[[Joey]]
-
->>> Heh, it was even easier than that! (Hooray for tests.) Done.
->>> --[[schmonz]]
-
- +<TMPL_IF HTML5><section id="inlineenclosure"><TMPL_ELSE><div id="inlineenclosure"></TMPL_IF>
- +<TMPL_IF ENCLOSURE>
-
-Can't we avoid adding this div when there's no enclosure? --[[Joey]]
-
-> Sure, I've moved the `<TMPL_IF ENCLOSURE>` check to outside the
-> section-and-div block for `{,inline}page.tmpl`. --[[schmonz]]
-
- +<a href="<TMPL_VAR ENCLOSURE>">Download this episode</a>
-
-"Download this episode" is pretty specific to particular use cases.
-Can this be made more generic, perhaps just "Download"? --[[Joey]]
-
-> Yep, I got a little carried away. Done. --[[schmonz]]
-
- -<TMPL_IF AUTHOR>
- - <title><TMPL_VAR AUTHOR ESCAPE=HTML>: <TMPL_VAR TITLE></title>
- - <dcterms:creator><TMPL_VAR AUTHOR ESCAPE=HTML></dcterms:creator>
-
-This change removes the author name from the title of the rss feed, which
-does not seem necessary for fancy podcasts. And it is a change that
-could negatively impact eg, Planet style aggregators using ikiwiki. --[[Joey]]
-
-> While comparing how feeds render in podcatchers, I noticed that
-> RSS and Atom were inconsistent in a couple ways, of which this was
-> one. The way I noticed it: with RSS, valuable title space was being
-> spent to display the author. I figured Atom's display was the one
-> worth matching. You're right, of course, that planets using the
-> default template and somehow relying on the current author-in-the-title
-> rendering for RSS feeds (but not Atom feeds!) would be broken by
-> this change. I'm having trouble imagining exactly what would break,
-> though, since guids and timestamps are unaffected. Would it suffice
-> to provide a note in the changelog warning people to be careful
-> upgrading their planets, and to customize `rssitem.tmpl` if they
-> really prefer the old behavior (or don't want to take any chances)?
-> --[[schmonz]]
-
->> A specific example I know of is updo.debian.net, when used with
->> rss2email. Without the author name there, one cannot see who posted
->> an item. It's worth noting that planet.debian.org does the same thing
->> with its rss feed. (That's probably what I copied.) Atom feeds may
->> not have this problem, don't know. --[[Joey]]
-
->>> Okay, that's easy to reproduce. It looks like this _might_ be
->>> a simple matter of getting \[[!aggregate]] to populate author in
->>> `add_page()`. I'll see what I can figure out. --[[schmonz]]
-
- +++ b/templates/rsspage.tmpl
- + xmlns:atom="http://www.w3.org/2005/Atom"
- +<atom:link href="<TMPL_VAR FEEDURL>" rel="self" type="application/rss+xml" />
-
-Why is it using atom namespace inside an rss feed? What are the chances
-every crummy rss reader on earth is going to understand this? I'd put it at
-about 0%; I doubt ikiwiki's own rss reader understands such a mashup.
---[[Joey]]
-
-> The validator I used (<http://validator.w3.org/>, I think) told me to.
-> Pretty sure it doesn't make anything work better in the podcatchers
-> I tried. Hadn't considered that it might break some readers.
-> Removed. --[[schmonz]]
-
- +<generator>ikiwiki</generator>
-
-Does this added tag provide any benefits? --[[Joey]]
-
-> Consistency with the Atom feed, and of course it trumpets ikiwiki
-> to software and/or curious humans who inspect their feeds. The tag
-> arrived only in RSS 2.0, but that's already the version we're
-> claiming to be, and it's over a decade old. Seems much less risky
-> than the atom namespace bits. --[[schmonz]]
-
->> Sounds ok then. --[[Joey]]
--- /dev/null
+# Round 1
+
+ikiwiki's simple podcasting, while elegant and minimal, doesn't (as
+mentioned in [[todo/blogging]]) produce full-featured feeds. In
+fancy podcasts, episodes are accompanied by text content. The feeds
+also have lots more metadata.
+
+## Design
+
+7. For each fancy podcast episode, write a blog post containing
+ `\[[!meta enclosure="WikiLink/to/media.mp3"]]`. (Don't specify
+ more than one enclosure -- but if you do, last one wins.)
+7. When rendering to HTML (single-page or inlined), append a link
+ to the media file.
+7. When rendering to RSS/Atom, the text is the entry's content and
+ the media file is its enclosure.
+7. Don't break simple podcasts in pursuit of fancy podcasts.
+
+## Implementation
+
+### Completed
+
+* Cover the existing simple podcast behavior with tests.
+* Add an `enclosure` field to [[plugins/meta]] that expands the
+ given [[ikiwiki/WikiLink]] to an absolute URL (feed enclosures
+ pretty much need to be, and the reference feeds I've looked at
+ all do this).
+* Write failing tests for the desired single-page and inlined
+ HTML behavior, then make them pass by adding enclosure stanzas
+ to `{,inline}page.tmpl`.
+* Write failing tests for the desired RSS/Atom behavior, then make
+ them pass via changes to `{atom,rss}item.tmpl` and [[plugins/inline]].
+* Match feature-for-feature with
+ [tru_podcast](http://www.rainskit.com/blog/542/tru_podcast-a-podcasting-plugin-for-textpattern)
+ (what [[schmonz]] will be migrating from).
+* Enrich [feed metadata](http://cyber.law.harvard.edu/rss/rss.html)
+ by catching up `rsspage.tmpl` to `atompage.tmpl`.
+* Verify that [[plugins/more]] plays well with fancy podcasts.
+* Verify that the feeds validate.
+* Subscribe to a fancy feed in some common podcatchers and verify
+ display details against a reference podcast.
+* Verify smooth transitions for two common use cases (see testing
+ details below).
+* Code review: don't add enclosure divs unless we have enclosures.
+* Code review: genericize download link for more use cases.
+* Code review: don't confuse old readers with Atom names in RSS.
+* Code review: instead of hacking back to `$link`, just provide it.
+* Code review: show author in addition to feedname, if different.
+
+### Code review
+
+ + # XXX better way to compute relative to srcdir?
+ + my $file = $absurl;
+ + $file =~ s|^$config{url}/||;
+
+I don't think ikiwiki offers a better way to do that, because there is
+normally no reason to do that. Why does it need an url of this form here?
+--[[Joey]]
+
+> In all the popular, production-quality podcast feeds I've looked
+> at, enclosure URLs are always absolute (even when they could be
+> expressed concisely as relative). [Apple's
+> example](http://www.apple.com/itunes/podcasts/specs.html#example)
+> does too. So I told \[[!meta]] to call `urlto()` with the third
+> parameter true, which means the \[[!inline]] code here gets an
+> absolute URL in `$pagestate{$p}{meta}{enclosure}`. To compute the
+> enclosure's metadata, though, we of course need it as a local path.
+> I didn't see a less
+> [ongepotchket](http://www.jewish-languages.org/jewish-english-lexicon/words/1402)
+> way at the time. If you have a better idea, I'm happy to hear it;
+> if not, I'll add an explanatory comment. --[[schmonz]]
+
+>> I would be more comfortable with this if the two different forms of url
+>> you need were both generated by calling urlto. It'd be fine to call
+>> it more than once. --[[Joey]]
+
+>>> Heh, it was even easier than that! (Hooray for tests.) Done.
+>>> --[[schmonz]]
+
+ +<TMPL_IF HTML5><section id="inlineenclosure"><TMPL_ELSE><div id="inlineenclosure"></TMPL_IF>
+ +<TMPL_IF ENCLOSURE>
+
+Can't we avoid adding this div when there's no enclosure? --[[Joey]]
+
+> Sure, I've moved the `<TMPL_IF ENCLOSURE>` check to outside the
+> section-and-div block for `{,inline}page.tmpl`. --[[schmonz]]
+
+ +<a href="<TMPL_VAR ENCLOSURE>">Download this episode</a>
+
+"Download this episode" is pretty specific to particular use cases.
+Can this be made more generic, perhaps just "Download"? --[[Joey]]
+
+> Yep, I got a little carried away. Done. --[[schmonz]]
+
+ -<TMPL_IF AUTHOR>
+ - <title><TMPL_VAR AUTHOR ESCAPE=HTML>: <TMPL_VAR TITLE></title>
+ - <dcterms:creator><TMPL_VAR AUTHOR ESCAPE=HTML></dcterms:creator>
+
+This change removes the author name from the title of the rss feed, which
+does not seem necessary for fancy podcasts. And it is a change that
+could negatively impact eg, Planet style aggregators using ikiwiki. --[[Joey]]
+
+> While comparing how feeds render in podcatchers, I noticed that
+> RSS and Atom were inconsistent in a couple ways, of which this was
+> one. The way I noticed it: with RSS, valuable title space was being
+> spent to display the author. I figured Atom's display was the one
+> worth matching. You're right, of course, that planets using the
+> default template and somehow relying on the current author-in-the-title
+> rendering for RSS feeds (but not Atom feeds!) would be broken by
+> this change. I'm having trouble imagining exactly what would break,
+> though, since guids and timestamps are unaffected. Would it suffice
+> to provide a note in the changelog warning people to be careful
+> upgrading their planets, and to customize `rssitem.tmpl` if they
+> really prefer the old behavior (or don't want to take any chances)?
+> --[[schmonz]]
+
+>> A specific example I know of is updo.debian.net, when used with
+>> rss2email. Without the author name there, one cannot see who posted
+>> an item. It's worth noting that planet.debian.org does the same thing
+>> with its rss feed. (That's probably what I copied.) Atom feeds may
+>> not have this problem, don't know. --[[Joey]]
+
+>>> Okay, that's easy to reproduce. It looks like this _might_ be
+>>> a simple matter of getting \[[!aggregate]] to populate author in
+>>> `add_page()`. I'll see what I can figure out. --[[schmonz]]
+
+>>>> Yep, that was mostly it. If the feed entry defines an author,
+>>>> and the author is distinct from the feed name, we now show `NAME:
+>>>> AUTHOR`, else just show `NAME` (same as always). In addition,
+>>>> the W3 feed validator says `<dcterms:creator>` is invalid, so
+>>>> I replaced it with `<dc:creator>`, and all of a sudden `r2e`
+>>>> gives me better `From:` headers. With the latest on my branch,
+>>>> when I generate the same planet as updo and run `r2e` over it,
+>>>> the names I get in `From:` look like so:
+
+* `"updo: Junio C Hamano"`
+* `"updo: Greg Kroah-Hartman"`
+* `"updo: Eric Raymond: esr"` (article author != feed name, so we get both)
+* `"updo: Jannis Pohlman: Jannis Pohlmann"` (oops! I tweaked the real updo)
+
+>>>> --[[schmonz]]
+
+ +++ b/templates/rsspage.tmpl
+ + xmlns:atom="http://www.w3.org/2005/Atom"
+ +<atom:link href="<TMPL_VAR FEEDURL>" rel="self" type="application/rss+xml" />
+
+Why is it using atom namespace inside an rss feed? What are the chances
+every crummy rss reader on earth is going to understand this? I'd put it at
+about 0%; I doubt ikiwiki's own rss reader understands such a mashup.
+--[[Joey]]
+
+> The validator I used (<http://validator.w3.org/feed/>) told me to.
+> Pretty sure it doesn't make anything work better in the podcatchers
+> I tried. Hadn't considered that it might break some readers.
+> Removed. --[[schmonz]]
+
+ +<generator>ikiwiki</generator>
+
+Does this added tag provide any benefits? --[[Joey]]
+
+> Consistency with the Atom feed, and of course it trumpets ikiwiki
+> to software and/or curious humans who inspect their feeds. The tag
+> arrived only in RSS 2.0, but that's already the version we're
+> claiming to be, and it's over a decade old. Seems much less risky
+> than the atom namespace bits. --[[schmonz]]
+
+>> Sounds ok then. --[[Joey]]
--- /dev/null
+it has been some years since the [[matching different kinds of links]] issue
+was tackled, but hardly a plugin is using it.
+
+in order to enhance on the [[todo/rel attribute for links]] and [[todo/better bug tracking support]]
+issues and to provide a more general infrastructure, i'd like to propose a
+generic plugin for typed links. it can be also viewed of a way to have
+[[todo/structured page data]] that consists of URLs inside the wiki.
+
+following the use case i've developed it for, i'll call it `blocks` for the
+moment (but am open to better suggestions).
+
+outline
+=======
+
+the plugin has a **configuration option** called `blocks_names`, which consists
+of pairs of verbs; the typical example is `blocks/blockedby`, but other values
+could be `next/prev up/down` or `owner/owns`.
+
+for each verb in the options, there is a **directive** which is used to state
+the relationship; relationships can be declared on both ends, so a page `bugA`
+with the contents `\[[!blocks bugB]]` is semantically equivalent to a page
+`bugB` with the contents `\[[!blockedby bugA]]`.
+
+for each verb, there is also a **pagespec** which matches all pages that are
+the origin of a relationship to a given page. if `developerA` `\[[!owns
+bug1]]`, then if `bug1` contains `\[[!map pages="owns(.)"]]`, it will show the
+owning developer. these specs match both ways, ie. if `bug1` `\[[!owner
+developerA]]`, the said map directive will still produce the same result.
+
+details
+=======
+
+* single word relationships vs. symmetric relationships
+
+ with some verbs, it is possible that a relationship is only used in one
+ direction (eg `index`, even though one could declare it as
+ `index/isindexof`).
+
+ > isindexof is not a very interesting relationship - it just clogs up
+ > the link-map, since the index is "the index of" all pages. I can't
+ > see any situation in which you'd want to do pagespec matching
+ > on it? --[[smcv]]
+
+ >> that's why i used `index` as an example of a one-direction relationship.
+ >>
+ >> it wouldn't clog up the link map, though: in order to cleanly match both
+ >> directions, when the "inverse" term of a relationship is used, the link in
+ >> taggedlinks uses the "forward" term, but switches the objects.
+ >>
+ >> --[[chrysn]]
+
+ other verbs are symmetric, eg. `equivalent`, which need different treatment.
+
+* "taglink" style directives
+
+ the [[plugins/tag]] plugin would be a special case for this plugin (apart
+ from the autotag and tagdir features). as there is a `\[[!taglink ...]]`
+ directive, there could be an analogous directive for every single directive.
+
+ > This is basically the traillink/trailitem duality, too.
+ > I'd be quite tempted to generalize to something like this:
+ >
+ > We can't fix [[!link blocks="bug123" text="Bug 123"]] until we do this.
+ >
+ > [[!hiddenlink owner="smcv"]]
+ >
+ > but perhaps that's too wordy?
+ >
+ > I think both trail and tag need their own special processing beyond the
+ > general case, but maybe not? --[[smcv]]
+
+ >> i'd be all in favor of having this unified and deeper; there has been the
+ >> idea of a `\[[!link]]` directive [[again|todo/link plugin perhaps too general__63__]]
+ >> and [[again|todo/do not make links backwards]].
+ >>
+ >> i like the `\[[!link text=""]]` and `[[!hiddenlink]]` conventions, but
+ >> think that ${REL}="${TARGET}" isn't ideal because it implies that a single
+ >> link can have more than one target. instead, i'd go for
+ >> `\[[!link to="bug123" rel="blocks" text="Bug 123"]]; as with the html rel
+ >> parameter, rel would be a list of whitespace separated values.
+ >>
+ >> positional parameters (`\[[!link bug123 rel="blocks" text="Bug 123"]]` or
+ >> even `\[[!link Bug 123|bug123 rel="blocks"]]`) would be possible, but i
+ >> prefer explicit syntax and not joining strings back again with the
+ >> whitespace that was split off it before.
+ >>
+ >> if the '|' character is not widespread in page names (which i assume it is
+ >> not), instead of using positional parameters in `\[[!link]]` for
+ >> shortcuts, we could extend the regular link syntax; the same relationship
+ >> could then be declared as `\[[Bug 123|bug123|blocks]]`; this would be an
+ >> easy extension to the original link syntax. it would even work for hidden links
+ >> (`\[[|smcv|owner]]`), which previously made no sense because a link with
+ >> neither a physical representation nor metadata is of no use.
+ >>
+ >> --[[chrysn]]
+
+* implementation notes
+
+ the way pagespec hooks are implemented required some nasty perl tricks, for
+ which the people who showed me felt very bad for having spoilt me. indeed,
+ `no strict refs;` and `*$forward_name = $forward_match;` are not exactly
+ ideal. a change in the pagespec declaration api (why not just `hook` like
+ everything else) would make the implementation cleaner.
+
+ > How about replacing `blockedby(bug*)` with `linktype(blockedby bug*)` or
+ > something? Then you'd only need one pseudo-hook. --[[smcv]]
+
+ >> there has been the topic of pagespecs like `typedlink(type glob)` back in
+ >> the [[matching different kinds of links]] discussion, but it was removed
+ >> in favor of per-type matchers. --[[chrysn]]
+
+* configuration location
+
+ i aimed for static configuration of the `blocks_names` in the setup file. this
+ could be made more general like in the [[plugins/shortcut]] plugin, but that
+ would make things more complex.
+
+* no html links with `rel=` yet
+
+ as there are no taglink style links between the articles so far, no htmllink
+ gets rendered that could carry the relationship name in its rel field.
+
+ having the inverse relationship description in backlinks (as in the link
+ created by the map directive in the example above) would be hard to
+ implement. (actually, i think it'd be easier to determine the rel values from
+ the taggedlinks for *every* htmllink than to influence the backlinks in this
+ plugin).
+
+* one direction also creates a normal link
+
+ due to the way add\_link treats relationships, the forward relationship is
+ always going to be reflected in the links/backlinks. a section of
+ [[todo/matching different kinds of links]] was dismissed with "let's not
+ worry about it", this plugin might be reason to worry about it again. (i'd
+ consider what is in @links to be a representation of which hyperlinks are
+ there, and in this case, none are generated).
+
+ > taglink and traillink already count as wikilinks without generating
+ > any visible HTML. --[[smcv]]
+
+implementation
+==============
+
+there is a working but slightly incomplete (basically where it comes to the
+details mentioned above) implementation in [[blocks.pm]].
+
+--[[chrysn]]
--- /dev/null
+#!/usr/bin/perl
+# Ikiwiki "blocks" relationship plugin.
+package IkiWiki::Plugin::blocks;
+
+use warnings;
+use strict;
+use IkiWiki 3.00;
+
+sub import {
+	hook(type => "getsetup", id => "blocks", call => \&getsetup);
+	# The id must be this plugin's own name; "skeleton" was a
+	# copy-paste leftover and would collide with the skeleton
+	# plugin's checkconfig hook if both plugins were loaded.
+	hook(type => "checkconfig", id => "blocks", call => \&checkconfig);
+}
+
+sub getsetup () {
+	# Describe the plugin and its single option for websetup.
+	# Both are rebuild => 1: changing relationship names changes
+	# which directives exist, so the wiki must be rebuilt.
+	return
+		plugin => {
+			safe => 1,
+			rebuild => 1,
+		},
+		blocks_names => {
+			type => "string",
+			example => "blocks/blockedby",
+			description => "comma separated list of defined relationship pairs, the forward and backward name separated by a slash",
+			safe => 1,
+			rebuild => 1,
+		},
+}
+
+sub checkconfig () {
+	# Parse blocks_names and, for each relationship pair, register a
+	# preprocess directive per verb (to declare relationships) and a
+	# pagespec match function per verb (to query them).
+	my $blocksnames;
+	if (defined $config{blocks_names}) {
+		$blocksnames = $config{blocks_names};
+	} else {
+		$blocksnames = "blocks/blockedby";
+	}
+
+	# The option is documented as comma separated; also accept
+	# spaces so existing space separated setups keep working.
+	while ( $blocksnames =~ /([^, ]+)/g )
+	{
+		# Anchored so a malformed entry is rejected with an error
+		# instead of being silently partially matched.
+		if ( $1 =~ m@^([a-zA-Z0-9]+)(/([a-zA-Z0-9]+))?$@ )
+		{
+			my $from = $1;
+			my $to = $3;
+			hook(
+				type => "preprocess",
+				shortcut => 1, # gets interpreted by listdirectives; see doc/bugs/cannot_preview_shortcuts.mdwn / ikiwiki commit 354d22e2
+				no_override => 1,
+				id => $from,
+				scan => 1,
+				call => sub { preprocess_blocks($from, 1, @_); }
+			);
+			if ($to)
+			{
+				hook(
+					type => "preprocess",
+					shortcut => 1,
+					no_override => 1,
+					id => $to,
+					scan => 1,
+					call => sub { preprocess_blocks($from, 0, @_); }
+				);
+			}
+
+			my $backward_match; my $backward_name;
+			my $forward_match; my $forward_name;
+
+			$backward_match = sub ($$;@) {
+				my $page=shift;
+				my $glob=shift;
+				return IkiWiki::PageSpec::match_backlink($page, $glob, linktype => $from, @_);
+			};
+
+			$backward_name = "IkiWiki::PageSpec::match_$from";
+
+			if ($to)
+			{
+				$forward_match = sub ($$;@) {
+					my $page=shift;
+					my $glob=shift;
+					return IkiWiki::PageSpec::match_link($page, $glob, linktype => $from, @_);
+				};
+
+				$forward_name = "IkiWiki::PageSpec::match_$to";
+			}
+
+			{
+				# Install the matchers by symbol-table assignment;
+				# there is no hook API for registering pagespec
+				# match functions.
+				no strict 'refs';
+
+				if ($to)
+				{
+					*$forward_name = $forward_match;
+				}
+				*$backward_name = $backward_match;
+			}
+		} else {
+			error gettext("Malformed option in blocks_names");
+		}
+	}
+}
+
+sub preprocess_blocks ($$@) {
+	# Shared directive handler for both verbs of a relationship pair.
+	# with flip=0, the directive occurring on page A pointing at page B
+	# means that A $relation B, with flip=1, it means B $relation A
+	# NOTE(review): checkconfig registers the forward verb ($from) with
+	# flip=1 and the backward verb ($to) with flip=0, which looks
+	# inverted relative to the description above -- confirm which
+	# direction is intended before relying on the link orientation.
+	my $relation = shift;
+	my $flip = shift;
+
+	# Directive used with no targets: nothing to record, no output.
+	if (! @_) {
+		return "";
+	}
+	my %params=@_;
+	my $page = $params{page};
+	delete $params{page};
+	delete $params{destpage};
+	delete $params{preview};
+
+	# Every remaining parameter name is a target page of the
+	# relationship.
+	foreach my $blocks (keys %params) {
+		$blocks=linkpage($blocks);
+
+		# hidden WikiLink
+		if ( $flip == 0 ) {
+			add_link($page, $blocks, $relation);
+		} else {
+			add_link($blocks, $page, $relation);
+		}
+	}
+
+	# Directives declare metadata only; they render as nothing.
+	return "";
+}
+
+1
[[!tag wishlist patch plugins/inline]]
-[[!template id=gitbranch branch=chrysn/patches author="[[chrysn]]"]]
+[[!template id=gitbranch branch=chrysn/inlineautotitles author="[[chrysn]]"]]
for postforms in inlines of pages which follow a certain scheme, it might not
be required to set the title for each individual post, but to automatically set
munging mechanism, which appends numbers to page titles in case that page
already exists.
-two patches ([1], [2]) set inline up for that, adding an additional `autotitle`
+two patches (b568eb25, 34bc82f2) set inline up for that, adding an additional `autotitle`
parameter. if that is given, the regular input of the inline postform will be
replaced with a hidden input of that text. in addition, the empty title is
permitted (both for autotitle and regular titles, as they go in the same GET
in "the 10%-rule").
both can be circumvented by using another GET parameter for autotexts, as
-implemented in [3].
+implemented in 40dc10a4.
> this patch still does not work perfectly; especially, it should make a
> distinction between "autotitle is set but equal ''" (in which case it
> should create a page named `1.mdwn`, and "autotitle is not set, and title is
cool (eg for creating an entry with yesterday's date), but would be a bit of a
problem with static pages. javascript could help with the date part, but name
munging would be yet another thing.
-
-[1]: http://github.com/github076986099/ikiwiki/commit/b568eb257a3ef5ff49a84ac00a3a7465b643c1e1
-[2]: http://github.com/github076986099/ikiwiki/commit/34bc82f232be141edf036d35e8ef5aa289415072
-[3]: http://github.com/github076986099/ikiwiki/commit/40dc10a4ec7809e401b4497c2abccfba30f7a2af
--- /dev/null
+[[!template id=gitbranch branch=schmonz/proxy author="[[schmonz]]"]]
+[[!tag patch]]
+
+## Background
+
+Ikiwiki has several plugins that open outbound connections:
+
+* [[plugins/aggregate]]
+* [[plugins/openid]]
+* [[plugins/pinger]]
+
+In some networks it is desired (or necessary) for these connections
+to traverse a proxy. Proxies are usually configured via environment
+variables, so for ikiwiki it is probably not desirable (or necessary)
+to manage proxy configuration via new purpose-specific settings in
+the setup file; `ENV` ought to suffice.
+
+[[!cpan LWP::UserAgent]] by default doesn't care about the usual
+environment variables, but if its constructor is passed a true
+`env_proxy` value, it does. Then environment settings such as these
+influence the agent, as expected:
+
+ ENV:
+ http_proxy: 'http://foo.internal:8080'
+ no_proxy: 'localhost,.bar.internal'
+
+If the proxy and/or destination requires authentication, it may
+also be necessary to prime `cookiejar`.
+
+## Changes
+
+This patch causes ikiwiki's `LWP::UserAgent` instances to:
+
+* honor proxy configuration in the environment
+* consistently make use of cookies configured in the setup file
+
+## Limitations
+
+The patch lets me configure [[plugins/aggregate]] to traverse my
+corporate proxy and to skip the proxy for internal hosts. I haven't
+tested it with the other two plugins, both of which prefer [[!cpan
+LWPx::ParanoidAgent]] if present, which deliberately lacks proxy
+support. There exists [[!cpan LWP::UserAgent::Paranoid]] claiming
+to be more modern, but I haven't looked at it further, so I don't
+know whether its paranoia measures up or whether it supports proxies.
+
+## See also
+
+* [[bugs/http_proxy_for_openid]]
+* [[forum/Setting_http__95__proxy]]
+
+> This looks fine, but you forgot to move the config entry for cookiejar
+> from aggregate to the config setting list in IkiWiki.pm --[[Joey]]
+
+>> Indeed, fixed. --[[schmonz]]
+
+>>> [[merged|done]] --[[Joey]]
--- /dev/null
+As described on [Tails issue #5879](https://labs.riseup.net/code/issues/5879),
+cgit wants URLs like:
+
+ https://git-tails.immerda.ch/tails/commit/wiki/src/todo/fix_localized_iceweasel_search_engine.mdwn?id=c682a361418bef7bce040ea2905691d54be192ab
+
+... while ikiwiki generates links like:
+
+ https://git-tails.immerda.ch/tails/commit/wiki/src/todo%2Ffix_localized_iceweasel_search_engine.mdwn?id=c682a361418bef7bce040ea2905691d54be192ab
+
+on the recentchanges page.
+
+Would it make sense to add an option to skip URL-encoding when doing
+the substitution of `\[[file]]` in `diffurl` and `historyurl`?
+
+--[[intrigeri]]
+
+> In `91b477c` I made a similar change to how the cvs backend encodes
+> `diffurl`. Perhaps it applies to all VCS backends? --[[schmonz]]
+
+[[wishlist]]
--- /dev/null
+Addressbooks are commonly kept on a CardDAV server, where the individual
+contacts are represented as single vCard (`.vcf`) files. Supporting those files
+as a source format could facilitate integration between ikiwiki and address
+books, and allow linking wiki entries to people. With the plugin presented here
+and the [[calypso|http://keithp.com/blogs/calypso/]] server, that is easily
+possible.
+
+The idea was first presented on [[DebConf13|forum/debconf13 ikiwiki bof]] by
+[[chrysn]].
+
+Software involved
+=================
+
+* CardDAV server
+
+ ikiwiki generates its pages from the local file system. Using the
+ [[calypso|http://keithp.com/blogs/calypso/]] CardDAV/CalDAV server makes that
+ trivial, as it stores its contacts as plain files already (and under git
+ version control!).
+
+ The files can be pulled into ikiwiki in the form of an underlay.
+
+* Rendering VCF files
+
+ A python plugin called `vcf` has been published in chrysn's plugin repository
+ at `git://prometheus.amsuess.com/ikiwiki-plugins`. It supports contact images
+ (as `data:` URLs) as well as all the attributes Evolution generates, creates
+ external hyperlinks for those attributes that can be dereferenced (including
+ telephone numbers), and renders vCard categories into ikiwiki tags.
+
+ Python was chosen as a programming language due to the availability of the
+ [[`vobject`|http://vobject.skyhouseconsulting.com/]] python module and the
+ author's personal preference.
+
+* Linking to contacts
+
+ File names of vCard files are usually not usable
+ (`4FD53349-51280409-28AD50CB.vcf`). To make links like "`I've talked to
+ \[[John Doe]] about this`" possible, the [[todo/alias directive]] has to be
+ implemented in the same repository. (Work in progress.)
+
+Issues / Further development
+============================
+
+* The rendered output is currently a plain definition list, and could need some
+ theming and/or grouping.
+
+ The current implementation does not use templating, and instead generates
+ HTML directly in Python (with the help of the
+ [[MarkupSafe|https://pypi.python.org/pypi/MarkupSafe]] module). Due to the
+ [[limitations of the foreign language API|plugins/write/external]], ikiwiki's
+ regular templating can't be used anyway.
+
+* Addresses are rendered by `python-vcard`'s builtin address renderer (plus a
+  hack to get unicode out of it), and are not always the most beautiful;
+  moreover, this kind of rendering is not themable.
+
+* The `paramlist` words in the vcards are plain dumped out after the respective
+ fields. That's unsuitable for theming, and would need localization.
+
+* In general, the plugin is not internationalized.
+
+* All VCF pages currently receive a 'Contact' tag. That needs configurability
+ and/or localization. In the same vein, prefix configuration for the category
+ taggings would be useful too.
+
+* The alias plugin is not ready yet.
+
+* vCards can not be edited. To be precise: vCards can be edited, but they show
+ vCard sources which are not suitable for user editing. Moreover, the changes
+ would be committed into the wiki main from the underlay, and not travel back.
+
+ Two approaches would make editing feasible, a relatively easy one and a
+ hard-but-wow-if-it-works one:
+
+  * Use a dedicated vCard editing online tool which accepts URLs for
+    editing (unlike CardDavMate, which does not). Ideally we would even hijack the
+ edit button on vCard pages to take the user to a proper editor.
+
+ * Have a simple edit mask for vCard objects. (It'd be ok if it was incomplete
+ with respect to fields, the vCard format is designed to make that
+ possible). That's related to the
+ [[structured data|forum/an alternative approach to structured data]] issue.
+
+ In such a setup, the vCard list would not go to an underlay, but rather to
+ a git checkout in a subdirectory or a submodule, from which the change
+ could then be pushed back to Calypso. (AFAIK, different git roots are not
+ supported in ikiwiki yet.)
+
+* Would such a renderer be useful for vCalendar too? (vCard and vCalendar only
+ differ in what they can represent, not in how they do it.)
+
+Status
+======
+
+The setup is currently usable, but just barely so. Once the basic issues are
+solved (aliases, parameters for tags, enhanced appearance), I will suggest this
+plugin for inclusion in ikiwiki.
+
+-- [[chrysn]]
i'm working on a python xmlrpc plugin for ikiwiki to support wikitrails, both as a navigation feature (have "forward" and "back" links based on a sequence) and a modified inline that includes all pages in the trail with appropriate modifications (suitable for printing if necessary).
-the current status is published on `git://github.com/github076986099/ikiwiki-plugins.git`; as of now, i don't have a public demo of it.
+the current status is published on `git://prometheus.amsuess.com/ikiwiki-plugins`; as of now, i don't have a public demo of it.
feedback on both the concept and the code is very much appreciated by [[discussion]] or [email](mailto:chrysn@fsfe.org).
+> update as of 2013: this implementation is kept in said ikiwiki-plugins directory for historical reference only; with the implementation nowadays available in ikiwiki, my implementation is obsolete. --[[chrysn]]
+
#### usage
two preprocessor commands are provided:
--- /dev/null
+Hi, by default xapian/omega uses the locale param from blog.setup to set the stemmer language when the wiki is indexing.
+
+But when you search, the omega cgi is used, and it does not set the language, so if you index in French but search in English, you get bad results.
+
+I propose to set a new param omega_stemmer in blog.setup, to use the same language when indexing and searching. If omega_stemmer is not set, the LANG env param is used.
+
+Below, you can find the patch.
+
+
+
+ diff --git a/IkiWiki/Plugin/search.pm b/IkiWiki/Plugin/search.pm
+ index 42d2e0d..08a0a01 100644
+ --- a/IkiWiki/Plugin/search.pm
+ +++ b/IkiWiki/Plugin/search.pm
+ @@ -33,6 +33,13 @@ sub getsetup () {
+ safe => 0, # external program
+ rebuild => 0,
+ },
+ + omega_stemmer => {
+ + type => "string",
+ + example => "en",
+ + description => "language used for indexing and searching",
+ + safe => 0, # external program
+ + rebuild => 0,
+ + },
+ }
+
+ sub checkconfig () {
+ @@ -136,7 +143,7 @@ sub indexhtml (@) {
+ # Index document and add terms for other metadata.
+ my $tg = Search::Xapian::TermGenerator->new();
+ if (! $stemmer) {
+ - my $langcode=$ENV{LANG} || "en";
+ + my $langcode=$config{omega_stemmer} || $ENV{LANG} || "en";
+ $langcode=~s/_.*//;
+
+ # This whitelist is here to work around a xapian bug (#486138)
+ @@ -183,6 +190,18 @@ sub cgi ($) {
+ IkiWiki::loadindex();
+ $ENV{HELPLINK}=htmllink("", "", "ikiwiki/searching",
+ noimageinline => 1, linktext => "Help");
+ + my $langcode=$config{omega_stemmer} || $ENV{LANG} || "en";
+ + $langcode=~s/_.*//;
+ +
+ + # This whitelist is here to work around a xapian bug (#486138)
+ + my @whitelist=qw{da de en es fi fr hu it no pt ru ro sv tr};
+ +
+ + if (grep { $_ eq $langcode } @whitelist) {
+ + $ENV{STEMMER}=$langcode;
+ + }
+ + else {
+ + $ENV{STEMMER}="en";
+ + }
+ exec($config{omega_cgi}) || error("$config{omega_cgi} failed: $!");
+ }
+ }
+ diff --git a/templates/searchquery.tmpl b/templates/searchquery.tmpl
+ index 15bc78e..4742460 100644
+ --- a/templates/searchquery.tmpl
+ +++ b/templates/searchquery.tmpl
+ @@ -1,6 +1,6 @@
+ $setmap{prefix,title,S}
+ $setmap{prefix,link,XLINK}
+ -$set{thousand,$.}$set{decimal,.}$setmap{BN,,Any Country,uk,England,fr,France}
+ +$set{thousand,$.}$set{decimal,.}$setmap{BN,,Any Country,uk,England,fr,France}$set{stemmer,$env{STEMMER}}
+ ${
+ $def{PREV,
+ $if{$ne{$topdoc,0},<INPUT TYPE=image NAME="<" ALT="<"
+
+Regards,
+
+[[!tag patch]]
--- /dev/null
+# ACodispo
+
* **website**: <http://christian.amsuess.com/>
* **uses ikiwiki for**: a bunch of internal documentation / organization projects
* **likes ikiwiki because**: it is a distributed organization tool that pretends to be a web app for the non-programmers out there
+
+chrysn is [[interested|interests]] in these pages:
+
+[[!map pages="backlink(users/chrysn/interests) and !link(todo/done) and !link(bugs/done) and !."]]
+
+chrysn was interested in these pages before they were done:
+
+[[!map pages="backlink(users/chrysn/interests) and (link(todo/done) or link(bugs/done))"]]
--- /dev/null
+these are the topics [[chrysn]] is or was interested in inside ikiwiki:
+
+* [[bugs/linkmap displays underscore escapes]]
+* [[bugs/methodResponse in add__95__plugins]]
+* [[bugs/preprocessing loop control too tight]]
+* [[bugs/proxy.py utf8 troubles]]
+* [[bugs/Underscores in links don't appear]]
+* [[bugs/unicode encoded urls and recentchanges]]
+* [[bugs/wrong link in recentchanges when reverting an ikiwiki outside git root]]
+* [[forum/debconf13 ikiwiki bof]]
+* [[forum/link to an image inside the wiki without inlining it]]
+* [[forum/managing todo lists]]
+* [[ikiwiki/directive/map/discussion]]
+* [[plugins/write/names]]
+* [[todo/ad-hoc plugins]]
+* [[todo/alias directive]]
+* [[todo/auto-create tag pages according to a template]]
+* [[todo/Better bug tracking support]]
+* [[todo/calendar with "create" links]]
+* [[todo/credentials page]]
+* [[todo/flexible relationships between pages]]
+* [[todo/inline postform autotitles]]
+* [[todo/internal definition list support]]
+* [[todo/mirrorlist with per-mirror usedirs settings]]
+* [[todo/more customisable titlepage function]]
+* [[todo/natural sorting]]
+* [[todo/sort parameter for map plugin and directive]]
+* [[todo/tracking bugs with dependencies]]
+* [[todo/unaccent url instead of encoding]]
+* [[todo/Using page titles in internal links]]
+* [[todo/vCard rendering]]
+* [[todo/wikitrails]]
… To put it short: an Ikiwiki newbie.
+[Altai State University]: http://www.asu.ru/
[Emacs]: http://www.gnu.org/software/emacs/
[Lynx]: http://lynx.isc.org/
+[Software Freedom Day]: http://sf-day.org/
## Wikis
Currently, I run a few Ikiwiki instances. Namely:
-* <http://lhc.am-1.org/lhc/>
- — to hold random stuff written by me, my colleagues,
- students, etc.
+* <http://sfd.am-1.org/>
+ — [Software Freedom Day][]
+ event at [Altai State University][].
+
+* <https://spire.am-1.org/>
+ — introductory materials
+ on XML, Markdown, Ikiwiki, etc.
+ in Russian.
+
+* <http://am-1.org/~ivan/networks-2011/>
+ — bits & pieces related to the course on computer
+ networks I've read in 2011.
-* <http://rsdesne.am-1.org/rsdesne-2010/>
- — for some of the materials related to the
+* http://rsdesne.am-1.org/rsdesne-2010/
+ **(down since December, 2012)**
+ — used to hold some of the materials related to the
“Remote Sensing in Education, Science and National
Economy” (2010-03-29 … 2010-04-10, Altai State
- University) program I've recently participated in as
+ University) program I've participated in as
an instructor.
-* <http://nets.asu591.ru/networks-2011/>
- — for bits & pieces related to the course on computer
- networks I've read in 2011.
+* http://lhc.am-1.org/lhc/
+ **(down since December, 2012)**
+ — used to hold random stuff written by me, my colleagues,
+ students, etc.
## Preferences
* At work, team documentation and project planning: product and sprint
backlogs, burndown charts, release plans/procedures/announcements,
- aggregating feeds of shared interest, etc. (with the
- [[plugins/contrib/dynamiccookies]] and [[plugins/contrib/proxies]] plugins)
+ aggregating feeds of shared interest, etc.
* On my laptop, personal to-do and scratch space
* [A small personal site](http://podcast.schmonz.com/) (happily hosted at
[Branchable](http://www.branchable.com/))
## Non-yet-ikiwiki
-* [My personal web site](http://www.schmonz.com/) (pending
- [[todo/fancypodcast]] integration)
+* [My personal web site](http://www.schmonz.com/) (to be happily
+ hosted at Branchable when it's running a post-[[todo/fancypodcast]]
+ release)
Name: ikiwiki
-Version: 3.20130710
+Version: 3.20130711
Release: 1%{?dist}
Summary: A wiki compiler
msgstr ""
"Project-Id-Version: PACKAGE VERSION\n"
"Report-Msgid-Bugs-To: \n"
-"POT-Creation-Date: 2013-07-10 18:33-0400\n"
+"POT-Creation-Date: 2013-07-10 21:56-0400\n"
"PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n"
"Last-Translator: FULL NAME <EMAIL@ADDRESS>\n"
"Language-Team: LANGUAGE <LL@li.org>\n"
msgid "%s is an attachment, not a page."
msgstr ""
-#: ../IkiWiki/Plugin/git.pm:783 ../IkiWiki/Plugin/git.pm:846
+#: ../IkiWiki/Plugin/git.pm:789 ../IkiWiki/Plugin/git.pm:852
#: ../IkiWiki.pm:1721
#, perl-format
msgid "you are not allowed to change %s"
msgstr ""
-#: ../IkiWiki/Plugin/git.pm:805
+#: ../IkiWiki/Plugin/git.pm:811
#, perl-format
msgid "you cannot act on a file with mode %s"
msgstr ""
-#: ../IkiWiki/Plugin/git.pm:809
+#: ../IkiWiki/Plugin/git.pm:815
msgid "you are not allowed to change file modes"
msgstr ""
-#: ../IkiWiki/Plugin/git.pm:879
+#: ../IkiWiki/Plugin/git.pm:885
msgid "you are not allowed to revert a merge"
msgstr ""
-#: ../IkiWiki/Plugin/git.pm:896
+#: ../IkiWiki/Plugin/git.pm:902
#, perl-format
msgid "Failed to revert commit %s"
msgstr ""
--- /dev/null
+#!/usr/bin/perl
+# Tests for podcast (enclosure) support in ikiwiki's inline plugin.
+use warnings;
+use strict;
+
+BEGIN {
+	# Skip the whole suite when the feed/HTML parsing modules the
+	# assertions depend on are not installed.
+	eval q{use XML::Feed; use HTML::Parser; use HTML::LinkExtor};
+	if ($@) {
+		eval q{use Test::More skip_all =>
+			"XML::Feed and/or HTML::Parser not available"};
+	}
+	else {
+		eval q{use Test::More tests => 136};
+	}
+}
+
+use Cwd;
+use File::Basename;
+
+# Scratch output directory, and the wiki state dir that each build
+# leaves behind; both are removed in every teardown.
+my $tmp = 't/tmp';
+my $statedir = 't/tinypodcast/.ikiwiki';
+
+sub podcast {
+	# Build the t/tinypodcast wiki with rss+atom feeds in the given
+	# podcast style ("simple" or "fancy") and verify feed metadata,
+	# per-entry dates, and enclosure handling in both feed formats.
+	my $podcast_style = shift;
+
+	my $baseurl = 'http://example.com';
+	my @command = (qw(./ikiwiki.out -plugin inline -rss -atom));
+	push @command, qw(-underlaydir=underlays/basewiki);
+	push @command, qw(-set underlaydirbase=underlays -templatedir=templates);
+	push @command, "-url=$baseurl", qw(t/tinypodcast), "$tmp/out";
+
+	ok(! system("mkdir $tmp"),
+		q{setup});
+	ok(! system(@command),
+		q{build});
+
+	# Expected enclosure MIME type per feed entry; undef marks a
+	# regular page entry that has body text and no enclosure.
+	my %media_types = (
+		'simplepost' => undef,
+		'piano.mp3' => 'audio/mpeg',
+		'scroll.3gp' => 'video/3gpp',
+		'walter.ogg' => 'video/x-theora+ogg',
+	);
+
+	for my $format (qw(atom rss)) {
+		my $feed = XML::Feed->parse("$tmp/out/$podcast_style/index.$format");
+
+		is($feed->title, $podcast_style,
+			qq{$format feed title});
+		is($feed->link, "$baseurl/$podcast_style/",
+			qq{$format feed link});
+		is($feed->description, 'wiki',
+			qq{$format feed description});
+		if ('atom' eq $format) {
+			# Fields XML::Feed only exposes for atom feeds.
+			is($feed->author, $feed->description,
+				qq{$format feed author});
+			is($feed->id, $feed->link,
+				qq{$format feed id});
+			is($feed->generator, "ikiwiki",
+				qq{$format feed generator});
+		}
+
+		for my $entry ($feed->entries) {
+			my $title = $entry->title;
+			my $url = $entry->id;
+			my $body = $entry->content->body;
+			my $enclosure = $entry->enclosure;
+
+			is($entry->link, $url, qq{$format $title link});
+			isnt($entry->issued, undef,
+				qq{$format $title issued date});
+			isnt($entry->modified, undef,
+				qq{$format $title modified date});
+
+			if (defined $media_types{$title}) {
+				# Media-only entry: the id is the media URL,
+				# there is no body, and the enclosure points
+				# back at the same URL.
+				is($url, "$baseurl/$title",
+					qq{$format $title id});
+				is($body, undef,
+					qq{$format $title no body text});
+				is($enclosure->url, $url,
+					qq{$format $title enclosure url});
+				is($enclosure->type, $media_types{$title},
+					qq{$format $title enclosure type});
+				cmp_ok($enclosure->length, '>', 0,
+					qq{$format $title enclosure length});
+			}
+			else {
+				# XXX hack hack hack
+				# Regular page entry: reconstruct its expected
+				# URL from the title (spaces -> underscores).
+				my $expected_id = "$baseurl/$title/";
+				$expected_id =~ s/\ /_/g;
+
+				is($url, $expected_id,
+					qq{$format $title id});
+				isnt($body, undef,
+					qq{$format $title body text});
+
+				if ('fancy' eq $podcast_style) {
+					# Fancy style: page entries carry the
+					# enclosure declared via meta.
+					isnt($enclosure, undef,
+						qq{$format $title enclosure});
+					my $filename = basename($enclosure->url);
+					is($enclosure->type, $media_types{$filename},
+						qq{$format $title enclosure type});
+					cmp_ok($enclosure->length, '>', 0,
+						qq{$format $title enclosure length});
+				}
+				else {
+					is($enclosure, undef,
+						qq{$format $title no enclosure});
+				}
+			}
+		}
+	}
+
+	ok(! system("rm -rf $tmp $statedir"), q{teardown});
+}
+
+sub single_page_html {
+	# Build without the inline plugin and check that standalone pages
+	# render body text plus an enclosure "Download" link, which is
+	# site-absolute without -url and fully absolute with -url.
+	my @command = (qw(./ikiwiki.out));
+	push @command, qw(-underlaydir=underlays/basewiki);
+	push @command, qw(-set underlaydirbase=underlays -templatedir=templates);
+	push @command, qw(t/tinypodcast), "$tmp/out";
+
+	ok(! system("mkdir $tmp"),
+		q{setup});
+	ok(! system(@command),
+		q{build});
+
+	my $html = "$tmp/out/pianopost/index.html";
+	like(_extract_html_content($html, 'content'), qr/has content and/m,
+		q{html body text});
+	like(_extract_html_content($html, 'enclosure'), qr/Download/m,
+		q{html enclosure});
+	my ($href) = _extract_html_links($html, 'piano');
+	is($href, '/piano.mp3',
+		q{html enclosure sans -url is site-absolute});
+
+	# A page declaring several enclosures should still end up with
+	# exactly one rendered enclosure link.
+	$html = "$tmp/out/attempted_multiple_enclosures/index.html";
+	like(_extract_html_content($html, 'content'), qr/has content and/m,
+		q{html body text});
+	like(_extract_html_content($html, 'enclosure'), qr/Download/m,
+		q{html enclosure});
+	($href) = _extract_html_links($html, 'walter');
+	is($href, '/walter.ogg',
+		q{html enclosure sans -url is site-absolute});
+
+	# Rebuild with -url and re-check the same links.
+	my $baseurl = 'http://example.com';
+	ok(! system(@command, "-url=$baseurl", q{--rebuild}));
+
+	$html = "$tmp/out/pianopost/index.html";
+	($href) = _extract_html_links($html, 'piano');
+	is($href, "$baseurl/piano.mp3",
+		q{html enclosure with -url is fully absolute});
+
+	$html = "$tmp/out/attempted_multiple_enclosures/index.html";
+	($href) = _extract_html_links($html, 'walter');
+	is($href, "$baseurl/walter.ogg",
+		q{html enclosure with -url is fully absolute});
+
+	ok(! system("rm -rf $tmp $statedir"), q{teardown});
+}
+
+sub inlined_pages_html {
+	# Build with the inline plugin and check that a page inlining
+	# enclosure-bearing posts shows each post's body text and its
+	# enclosure "Download" link.
+	my @command = (qw(./ikiwiki.out -plugin inline));
+	push @command, qw(-underlaydir=underlays/basewiki);
+	push @command, qw(-set underlaydirbase=underlays -templatedir=templates);
+	push @command, qw(t/tinypodcast), "$tmp/out";
+
+	ok(! system("mkdir $tmp"),
+		q{setup});
+	ok(! system(@command),
+		q{build});
+
+	my $html = "$tmp/out/fancy/index.html";
+	my $contents = _extract_html_content($html, 'content');
+	like($contents, qr/has content and an/m,
+		q{html body text from pianopost});
+	like($contents, qr/has content and only one/m,
+		q{html body text from attempted_multiple_enclosures});
+	my $enclosures = _extract_html_content($html, 'inlineenclosure');
+	like($enclosures, qr/Download/m,
+		q{html enclosure});
+	my ($href) = _extract_html_links($html, 'piano.mp3');
+	is($href, '/piano.mp3',
+		q{html enclosure from pianopost sans -url});
+	($href) = _extract_html_links($html, 'walter.ogg');
+	is($href, '/walter.ogg',
+		q{html enclosure from attempted_multiple_enclosures sans -url});
+
+	ok(! system("rm -rf $tmp $statedir"), q{teardown});
+}
+
+sub _extract_html_content {
+	# Return the decoded text found after the first element of the
+	# wanted tag (default "div") whose id attribute equals $id.
+	my ($file, $id, $tag) = @_;
+	$tag = 'div' if ! defined $tag;
+
+	my $parser = HTML::Parser->new(api_version => 3);
+	my $text = '';
+
+	$parser->handler(start => sub {
+		my ($tagname, $self, $attr) = @_;
+		return unless $tagname eq $tag;
+		return unless exists $attr->{id} && $attr->{id} eq $id;
+
+		# Once the wanted element starts, accumulate all decoded
+		# text from here on.
+		$self->handler(text => sub {
+			$text .= shift;
+		}, "dtext");
+	}, "tagname,self,attr");
+
+	$parser->parse_file($file) || die $!;
+
+	return $text;
+}
+
+sub _extract_html_links {
+	# Collect the attribute values of every <a> element in $file
+	# whose href matches the given pattern; relative URLs are
+	# resolved against the current working directory.
+	my ($file, $pattern) = @_;
+
+	my @found;
+
+	my $extractor = HTML::LinkExtor->new(sub {
+		my ($tag, %attr) = @_;
+		return unless $tag eq 'a';
+		return unless $attr{href} =~ qr/$pattern/;
+		push @found, values %attr;
+	}, getcwd() . '/' . $file);
+
+	$extractor->parse_file($file);
+
+	return @found;
+}
+
+# Exercise both podcast styles plus the standalone and inlined HTML paths.
+podcast('simple');
+single_page_html();
+inlined_pages_html();
+podcast('fancy');
--- /dev/null
+[[!meta enclosure="piano.mp3" enclosure="scroll.3gp"]]
+[[!meta enclosure="walter.ogg"]]
+
+this article has content _and_ only one enclosure!
--- /dev/null
+[[!inline pages="pianopost or attempted_multiple_enclosures"]]
--- /dev/null
+[[!meta enclosure="piano.mp3"]]
+
+this article has content _and_ an enclosure!
--- /dev/null
+[[!inline pages="simplepost or *.3gp or *.mov or *.mp3 or *.ogg"]]
--- /dev/null
+this article has content but no enclosure
<TMPL_IF COPYRIGHT>
[[!meta copyright="<TMPL_VAR COPYRIGHT ESCAPE=HTML>"]]
</TMPL_IF>
+<TMPL_IF AUTHOR>
+[[!meta author="<TMPL_VAR NAME ESCAPE=HTML>: <TMPL_VAR AUTHOR ESCAPE=HTML>"]]
+<TMPL_ELSE>
[[!meta author="<TMPL_VAR NAME ESCAPE=HTML>"]]
+</TMPL_IF>
[[!meta authorurl="<TMPL_VAR URL ESCAPE=HTML>"]]
<published><TMPL_VAR CDATE_3339></published>
<TMPL_IF ENCLOSURE>
<link rel="enclosure" type="<TMPL_VAR TYPE>" href="<TMPL_VAR ENCLOSURE>" length="<TMPL_VAR LENGTH>" />
-<TMPL_ELSE>
+</TMPL_IF>
+<TMPL_UNLESS SIMPLEPODCAST>
<content type="html" xml:lang="en">
<TMPL_VAR CONTENT ESCAPE=HTML>
</content>
-</TMPL_IF>
+</TMPL_UNLESS>
<TMPL_IF COMMENTSURL>
<link rel="comments" href="<TMPL_VAR COMMENTSURL>" type="text/html" />
</TMPL_IF>
<TMPL_VAR CONTENT>
<TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
+<TMPL_IF ENCLOSURE>
+<TMPL_IF HTML5><section id="inlineenclosure"><TMPL_ELSE><div id="inlineenclosure"></TMPL_IF>
+<a href="<TMPL_VAR ENCLOSURE>">Download</a>
+<TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
+</TMPL_IF>
+
<TMPL_IF HTML5><footer class="inlinefooter"><TMPL_ELSE><div class="inlinefooter"></TMPL_IF>
<span class="pagedate">
<TMPL_VAR CONTENT>
<TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
+<TMPL_IF ENCLOSURE>
+<TMPL_IF HTML5><section id="enclosure"><TMPL_ELSE><div id="enclosure"></TMPL_IF>
+<a href="<TMPL_VAR ENCLOSURE>">Download</a>
+<TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
+</TMPL_IF>
+
<TMPL_UNLESS DYNAMIC>
<TMPL_IF COMMENTS>
<TMPL_IF HTML5><section id="comments"><TMPL_ELSE><div id="comments"></TMPL_IF>
<item>
-<TMPL_IF AUTHOR>
- <title><TMPL_VAR AUTHOR ESCAPE=HTML>: <TMPL_VAR TITLE></title>
- <dcterms:creator><TMPL_VAR AUTHOR ESCAPE=HTML></dcterms:creator>
-<TMPL_ELSE>
<title><TMPL_VAR TITLE></title>
-</TMPL_IF>
<TMPL_IF GUID>
<guid isPermaLink="false"><TMPL_VAR GUID></guid>
<TMPL_ELSE>
<guid isPermaLink="false"><TMPL_VAR URL></guid>
</TMPL_IF>
<link><TMPL_VAR PERMALINK></link>
+<TMPL_IF AUTHOR>
+ <dc:creator><TMPL_VAR AUTHOR ESCAPE=HTML></dc:creator>
+</TMPL_IF>
<TMPL_IF CATEGORIES>
<TMPL_LOOP CATEGORIES>
<category><TMPL_VAR CATEGORY></category>
<dcterms:modified><TMPL_VAR MDATE_3339></dcterms:modified>
<TMPL_IF ENCLOSURE>
<enclosure url="<TMPL_VAR ENCLOSURE>" type="<TMPL_VAR TYPE>" length="<TMPL_VAR LENGTH>" />
-<TMPL_ELSE>
- <description><TMPL_VAR CONTENT ESCAPE=HTML></description>
</TMPL_IF>
+<TMPL_UNLESS SIMPLEPODCAST>
+ <description><TMPL_VAR CONTENT ESCAPE=HTML></description>
+</TMPL_UNLESS>
<TMPL_IF COMMENTSURL>
<comments><TMPL_VAR COMMENTSURL></comments>
</TMPL_IF>
<channel>
<title><TMPL_VAR TITLE></title>
<link><TMPL_VAR PAGEURL></link>
+<TMPL_IF COPYRIGHT>
+<copyright><TMPL_VAR COPYRIGHT ESCAPE=HTML></copyright>
+</TMPL_IF>
<description><TMPL_VAR FEEDDESC ESCAPE=HTML></description>
+<generator>ikiwiki</generator>
+<pubDate><TMPL_VAR FEEDDATE_822></pubDate>
<TMPL_VAR CONTENT>
</channel>
</rss>
padding: 2px;
}
-#content, #comments, #footer {
+#content, #enclosure, #comments, #footer {
margin: 1em 2em;
}
* Copyright (C) 2010 Bernd Zeimetz
* Licensed under same license as ikiwiki: GPL v2 or later */
-.page, .pageheader, .sidebar, #content, #comments, .inlinepage, .recentchanges, .pageheader .actions ul, #pagebody {
+.page, .pageheader, .sidebar, #content, #enclosure, #comments, .inlinepage, .recentchanges, .pageheader .actions ul, #pagebody {
border: none;
}
clear: none;
}
-#content a, #comments a, .sidebar a {
+#content a, #enclosure a, #comments a, .sidebar a {
color: #315485;
text-decoration: none;
font-weight: bold;
.pageheader .header span a, .pageheader .actions ul li a, .pageheader .header .parentlinks a {
color #315485;
}
- #content, #comments, #pagebody {
+ #content, #enclosure, #comments, #pagebody {
margin-right: 0;
*margin-right: 0;
border-right: none;
background-color: #f2d98d;
}
-#content a:hover, #comments a:hover, .sidebar a:hover,
-#content a:visited:hover, #comments a:visited:hover, .sidebar a:visited:hover {
+#content a:hover, #enclosure a:hover, #comments a:hover, .sidebar a:hover,
+#content a:visited:hover, #enclosure a:visited:hover, #comments a:visited:hover,
+.sidebar a:visited:hover {
color: red;
}
-#content a:visited, #comments a:visited, .sidebar a:visited {
+#content a:visited, #enclosure a:visited, #comments a:visited,
+.sidebar a:visited {
color: #37485e;
}
* thanks to <http://www.kryogenix.org/days/2002/08/30/external>
*/
#content a[href^="http:"]:after,
-#content a[href^="https:"]:after {
+#content a[href^="https:"]:after,
+#enclosure a[href^="http:"]:after,
+#enclosure a[href^="https:"]:after {
content: "↗";
}
/* you will want to replicate this for your own domain in local.css */
#content a[href^="http://localhost"]:after,
-#content a[href^="http://ikiwiki.info"]:after {
+#content a[href^="http://ikiwiki.info"]:after,
+#enclosure a[href^="http://localhost"]:after,
+#enclosure a[href^="http://ikiwiki.info"]:after {
content: none;
}