use warnings;
use strict;
use Encode;
+use Fcntl q{:flock};
use URI::Escape q{uri_escape_utf8};
use POSIX ();
use Storable;
html5 => {
type => "boolean",
default => 0,
- description => "generate HTML5?",
+ description => "use elements new in HTML5 like <section>?",
advanced => 0,
safe => 1,
rebuild => 1,
safe => 0,
rebuild => 0,
},
+ responsive_layout => {
+ type => "boolean",
+ default => 1,
+ description => "theme has a responsive layout? (mobile-optimized)",
+ safe => 1,
+ rebuild => 1,
+ },
}
sub getlibdirs () {
}
open($wikilock, '>', "$config{wikistatedir}/lockfile") ||
error ("cannot write to $config{wikistatedir}/lockfile: $!");
- if (! flock($wikilock, 2)) { # LOCK_EX
- error("failed to get lock");
+ if (! flock($wikilock, LOCK_EX | LOCK_NB)) {
+ debug("failed to get lock; waiting...");
+ if (! flock($wikilock, LOCK_EX)) {
+ error("failed to get lock");
+ }
}
return 1;
}
if (exists $params{page}) {
$page=delete $params{page};
$params{forcebaseurl}=urlto($page);
- if (! $config{html5}) {
- $params{forcebaseurl}=urlabs($params{forcebaseurl}, $topurl);
- }
}
run_hooks(pagetemplate => sub {
shift->(
templateactions($template, "");
my $baseurl = baseurl();
- if (! $config{html5}) {
- $baseurl = urlabs($baseurl, $topurl),
- }
$template->param(
dynamic => 1,
my $time=time;
my @now=localtime($time);
+my %changed;
sub import {
+ hook(type => "checkconfig", id => "calendar", call => \&checkconfig);
hook(type => "getsetup", id => "calendar", call => \&getsetup);
hook(type => "needsbuild", id => "calendar", call => \&needsbuild);
hook(type => "preprocess", id => "calendar", call => \&preprocess);
+ hook(type => "scan", id => "calendar", call => \&scan);
+ hook(type => "build_affected", id => "calendar", call => \&build_affected);
+
+ IkiWiki::loadplugin("transient");
}
sub getsetup () {
archive_pagespec => {
type => "pagespec",
example => "page(posts/*) and !*/Discussion",
- description => "PageSpec of pages to include in the archives; used by ikiwiki-calendar command",
+ description => "PageSpec of pages to include in the archives, if option `calendar_autocreate` is true.",
link => 'ikiwiki/PageSpec',
safe => 1,
rebuild => 0,
},
+ calendar_autocreate => {
+ type => "boolean",
+ example => 1,
+ description => "autocreate new calendar pages?",
+ safe => 1,
+ rebuild => undef,
+ },
+ calendar_fill_gaps => {
+ type => "boolean",
+ example => 1,
+ default => 1,
+ description => "if set, when building calendar pages, also build pages of year and month when no pages were published (building empty calendars).",
+ safe => 1,
+ rebuild => 0,
+ },
+}
+
+sub checkconfig () {
+ if (! defined $config{calendar_autocreate}) {
+ $config{calendar_autocreate} = defined $config{archivebase};
+ }
+ if (! defined $config{archive_pagespec}) {
+ $config{archive_pagespec} = '*';
+ }
+ if (! defined $config{archivebase}) {
+ $config{archivebase} = 'archives';
+ }
+ if (! defined $config{calendar_fill_gaps}) {
+ $config{calendar_fill_gaps} = 1;
+ }
}
sub is_leap_year (@) {
return $days_in_month;
}
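+# Called from the build_affected hook: for each year/month recorded in
+# %changed during this refresh, rebuild the calendars of the neighbouring
+# years and months, whose previous/next links may have changed.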
+sub build_affected {
+ my %affected;
+ my ($ayear, $amonth, $valid);
+
+ foreach my $year (keys %changed) {
+ ($ayear, $valid) = nextyear($year, $config{archivebase});
+ $affected{calendarlink($ayear)} = sprintf(gettext("building calendar for %s, its previous or next year has changed"), $ayear) if ($valid);
+ ($ayear, $valid) = previousyear($year, $config{archivebase});
+ $affected{calendarlink($ayear)} = sprintf(gettext("building calendar for %s, its previous or next year has changed"), $ayear) if ($valid);
+ foreach my $month (keys %{$changed{$year}}) {
+ ($ayear, $amonth, $valid) = nextmonth($year, $month, $config{archivebase});
+ $affected{calendarlink($ayear, sprintf("%02d", $amonth))} = sprintf(gettext("building calendar for %s/%s, its previous or next month has changed"), $amonth, $ayear) if ($valid);
+ ($ayear, $amonth, $valid) = previousmonth($year, $month, $config{archivebase});
+ $affected{calendarlink($ayear, sprintf("%02d", $amonth))} = sprintf(gettext("building calendar for %s/%s, its previous or next month has changed"), $amonth, $ayear) if ($valid);
+ }
+ }
+
+ return %affected;
+}
+
+sub autocreate {
+ my ($page, $pagefile, $year, $month) = @_;
+ my $message=sprintf(gettext("creating calendar page %s"), $page);
+ debug($message);
+
+ my $template;
+ if (defined $month) {
+ $template=template("calendarmonth.tmpl");
+ } else {
+ $template=template("calendaryear.tmpl");
+ }
+ $template->param(year => $year);
+ $template->param(month => $month) if defined $month;
+ $template->param(pagespec => $config{archive_pagespec});
+
+ my $dir = $IkiWiki::Plugin::transient::transientdir;
+
+ writefile($pagefile, $dir, $template->output);
+}
+
+sub calendarlink($;$) {
+ my ($year, $month) = @_;
+ if (defined $month) {
+ return $config{archivebase} . "/" . $year . "/" . $month;
+ } else {
+ return $config{archivebase} . "/" . $year;
+ }
+}
+
+sub gencalendarmonth {
+ my $year = shift;
+ my $month = sprintf("%02d", shift);
+
+ my $page = calendarlink($year, $month);
+ my $pagefile = newpagefile($page, $config{default_pageext});
+ add_autofile(
+ $pagefile, "calendar",
+ sub {return autocreate($page, $pagefile, $year, $month);}
+ );
+}
+
+sub gencalendaryear {
+ my $year = shift;
+ my %params = @_;
+
+ # Building year page
+ my $page = calendarlink($year);
+ my $pagefile = newpagefile($page, $config{default_pageext});
+ add_autofile(
+ $pagefile, "calendar",
+ sub {return autocreate($page, $pagefile, $year);}
+ );
+
+ if (not exists $wikistate{calendar}{minyear}) {
+ $wikistate{calendar}{minyear} = $year;
+ }
+ if (not exists $wikistate{calendar}{maxyear}) {
+ $wikistate{calendar}{maxyear} = $year;
+ }
+
+ if ($config{calendar_fill_gaps}) {
+ # Building month pages
+ foreach my $month (1 .. 12) {
+ gencalendarmonth($year, $month);
+ }
+
+ # Filling potential gaps in years (e.g. calendar goes from 2010 to 2014,
+ # and we just added year 2005. We have to add years 2006 to 2009).
+ return if $params{norecurse};
+ if ($wikistate{calendar}{minyear} > $year) {
+ foreach my $other ($year + 1 .. $wikistate{calendar}{minyear} - 1) {
+ gencalendaryear($other, norecurse => 1);
+ }
+ $wikistate{calendar}{minyear} = $year;
+ }
+ if ($wikistate{calendar}{maxyear} < $year) {
+ foreach my $other ($wikistate{calendar}{maxyear} + 1 .. $year - 1) {
+ gencalendaryear($other, norecurse => 1);
+ }
+ $wikistate{calendar}{maxyear} = $year;
+ }
+ }
+ if ($year < $wikistate{calendar}{minyear}) {
+ $wikistate{calendar}{minyear} = $year;
+ }
+ if ($year > $wikistate{calendar}{maxyear}) {
+ $wikistate{calendar}{maxyear} = $year;
+ }
+}
+
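+# The four helpers below (previousmonth, nextmonth, previousyear, nextyear)
+# look for the nearest existing archive page before or after the given
+# year/month under $archivebase. Each returns the year (and month, where
+# relevant) plus a validity flag that is false once the search moves past
+# the oldest or newest year recorded in $wikistate{calendar}.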
+sub previousmonth($$$) {
+ my $year = shift;
+ my $month = shift;
+ my $archivebase = shift;
+
+ if (not exists $wikistate{calendar}{minyear}) {
+ $wikistate{calendar}{minyear} = $year;
+ }
+
+ my $pmonth = $month;
+ my $pyear = $year;
+ while ((not exists $pagesources{"$archivebase/$pyear/" . sprintf("%02d", $pmonth)}) or ($pmonth == $month and $pyear == $year)) {
+ $pmonth -= 1;
+ if ($pmonth == 0) {
+ $pyear -= 1;
+ $pmonth = 12;
+ return ($pyear, $pmonth, 0) unless $pyear >= $wikistate{calendar}{minyear};
+ }
+ }
+ return ($pyear, $pmonth, 1);
+}
+
+sub nextmonth($$$) {
+ my $year = shift;
+ my $month = shift;
+ my $archivebase = shift;
+
+ if (not exists $wikistate{calendar}{maxyear}) {
+ $wikistate{calendar}{maxyear} = $year;
+ }
+
+ my $nmonth = $month;
+ my $nyear = $year;
+ while ((not exists $pagesources{"$archivebase/$nyear/" . sprintf("%02d", $nmonth)}) or ($nmonth == $month and $nyear == $year)) {
+ $nmonth += 1;
+ if ($nmonth == 13) {
+ $nyear += 1;
+ $nmonth = 1;
+ return ($nyear, $nmonth, 0) unless $nyear <= $wikistate{calendar}{maxyear};
+ }
+ }
+ return ($nyear, $nmonth, 1);
+}
+
+sub previousyear($$) {
+ my $year = shift;
+ my $archivebase = shift;
+
+ my $pyear = $year - 1;
+ while (not exists $pagesources{"$archivebase/$pyear"}) {
+ $pyear -= 1;
+ return ($pyear, 0) unless ($pyear >= $wikistate{calendar}{minyear});
+ }
+ return ($pyear, 1);
+}
+
+sub nextyear($$) {
+ my $year = shift;
+ my $archivebase = shift;
+
+ my $nyear = $year + 1;
+ while (not exists $pagesources{"$archivebase/$nyear"}) {
+ $nyear += 1;
+ return ($nyear, 0) unless ($nyear <= $wikistate{calendar}{maxyear});
+ }
+ return ($nyear, 1);
+}
+
sub format_month (@) {
my %params=@_;
push(@{$linkcache{"$year/$mtag/$mday"}}, $p);
}
- my $pmonth = $params{month} - 1;
- my $nmonth = $params{month} + 1;
- my $pyear = $params{year};
- my $nyear = $params{year};
-
- # Adjust for January and December
- if ($params{month} == 1) {
- $pmonth = 12;
- $pyear--;
- }
- if ($params{month} == 12) {
- $nmonth = 1;
- $nyear++;
- }
+ my $archivebase = 'archives';
+ $archivebase = $config{archivebase} if defined $config{archivebase};
+ $archivebase = $params{archivebase} if defined $params{archivebase};
+
+ my ($pyear, $pmonth, $pvalid) = previousmonth($params{year}, $params{month}, $archivebase);
+ my ($nyear, $nmonth, $nvalid) = nextmonth($params{year}, $params{month}, $archivebase);
# Add padding.
$pmonth=sprintf("%02d", $pmonth);
my $pmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$pmonth-1,$pyear-1900)));
my $nmonthname=strftime_utf8("%B", localtime(timelocal(0,0,0,1,$nmonth-1,$nyear-1900)));
- my $archivebase = 'archives';
- $archivebase = $config{archivebase} if defined $config{archivebase};
- $archivebase = $params{archivebase} if defined $params{archivebase};
-
# Calculate URL's for monthly archives.
my ($url, $purl, $nurl)=("$monthname $params{year}",'','');
if (exists $pagesources{"$archivebase/$params{year}/$params{month}"}) {
sub format_year (@) {
my %params=@_;
-
+
my @post_months;
foreach my $p (pagespec_match_list($params{page},
"creation_year($params{year}) and ($params{pages})",
}
my $calendar="\n";
+
+ my $archivebase = 'archives';
+ $archivebase = $config{archivebase} if defined $config{archivebase};
+ $archivebase = $params{archivebase} if defined $params{archivebase};
- my $pyear = $params{year} - 1;
- my $nyear = $params{year} + 1;
+ my ($pyear, $pvalid) = previousyear($params{year}, $archivebase);
+ my ($nyear, $nvalid) = nextyear($params{year}, $archivebase);
my $thisyear = $now[5]+1900;
my $future_month = 0;
$future_month = $now[4]+1 if $params{year} == $thisyear;
- my $archivebase = 'archives';
- $archivebase = $config{archivebase} if defined $config{archivebase};
- $archivebase = $params{archivebase} if defined $params{archivebase};
-
# calculate URL's for previous and next years
my ($url, $purl, $nurl)=("$params{year}",'','');
if (exists $pagesources{"$archivebase/$params{year}"}) {
}
$params{month} = sprintf("%02d", $params{month});
+ $changed{$params{year}}{$params{month}} = 1;
if ($params{type} eq 'month' && $params{year} == $thisyear
&& $params{month} == $thismonth) {
}
}
}
+
return $needsbuild;
}
+sub scan (@) {
+ my %params=@_;
+ my $page=$params{page};
+
+ return unless $config{calendar_autocreate};
+
+ # Check if year pages have to be generated
+ if (pagespec_match($page, $config{archive_pagespec})) {
+ my @ctime = localtime($IkiWiki::pagectime{$page});
+ gencalendaryear($ctime[5]+1900);
+ gencalendarmonth($ctime[5]+1900, $ctime[4]+1);
+ }
+}
+
1
sub mirrorlist ($) {
my $page=shift;
- return ($config{html5} ? '<nav id="mirrorlist">' : '<div>').
+ return ($config{html5} ? '<nav' : '<div').' id="mirrorlist">'.
(keys %{$config{mirrorlist}} > 1 ? gettext("Mirrors") : gettext("Mirror")).
": ".
join(", ",
ctime => displaytime($pagectime{$page}, undef, 1),
baseurl => baseurl($page),
html5 => $config{html5},
+ responsive_layout => $config{responsive_layout},
);
run_hooks(pagetemplate => sub {
+ikiwiki (3.20141201) UNRELEASED; urgency=medium
+
+ By default, this version of IkiWiki tells mobile browsers that its
+ layout is suitable for small screens. The default layout and the
+ actiontabs, blueview, goldtype and monochrome themes have been adjusted.
+
+ If you have custom CSS that does not work in a small window (a typical
+ phone is 320 to 400 CSS pixels wide), please set the new responsive_layout
+ config option to 0.
+
+ -- Simon McVittie <smcv@debian.org> Mon, 01 Dec 2014 16:04:41 +0000
+
ikiwiki (3.20110122) unstable; urgency=low
If you have custom CSS that uses "#feedlinks" or "#blogform", you will
ikiwiki (3.20141017) UNRELEASED; urgency=medium
+ [ Joey Hess ]
* Added ikiwiki-comment program.
* Add missing build-depends on libcgi-formbuilder-perl, needed for
t/relativity.t
* Set Debian package maintainer to Simon McVittie as I'm retiring from
Debian.
+ [ Simon McVittie ]
+ * calendar: add calendar_autocreate option, with which "ikiwiki --refresh"
+ can mostly supersede the ikiwiki-calendar command.
+ Thanks, Louis Paternault
+ * search: add more classes as a hook for CSS. Thanks, sajolida
+ * core: generate HTML5 by default, but keep avoiding new elements
+ like <section> that require specific browser support unless html5 is
+ set to 1.
+ * Tell mobile browsers to draw our pages in a device-sized viewport,
+ not an 800-1000px viewport designed to emulate a desktop/laptop browser.
+ * Add new responsive_layout option which can be set to 0 if your custom
+ CSS only works in a large viewport.
+ * style.css, actiontabs, blueview, goldtype, monochrome: adjust layout
+ below 600px ("responsive layout") so that horizontal scrolling is not
+ needed on smartphone browsers or other small viewports.
+
-- Joey Hess <joeyh@debian.org> Mon, 20 Oct 2014 12:04:49 -0400
ikiwiki (3.20141016) unstable; urgency=medium
Build-Depends-Indep: dpkg-dev (>= 1.9.0), libxml-simple-perl,
libtext-markdown-discount-perl,
libtimedate-perl, libhtml-template-perl,
- libhtml-scrubber-perl, wdg-html-validator,
+ libhtml-scrubber-perl,
libhtml-parser-perl, liburi-perl (>= 1.36), perlmagick, po4a (>= 0.34),
libfile-chdir-perl, libyaml-libyaml-perl, librpc-xml-perl,
libcgi-pm-perl, libcgi-session-perl, ghostscript,
<hr>
--
2.1.1
+
+> [[Applied|done]], thanks --[[smcv]]
> a proportion of desktop browsers, rather than being responsive to window
> size like they should have been all along. --[[smcv]]
+>> Fixed in master. It is currently on by default, but people with
+>> customized CSS that doesn't scale nicely can put
+>> `responsive_layout => 0` in the config to turn it off. --[[smcv]]
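+>>
+>> For example, in the setup file (an illustrative snippet; nothing else
+>> needs to change):
+>>
+>>     responsive_layout => 0,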
+
Furthermore:
* fonts need to be tweaked
> anti-theme; enhancing the other themes would be OK though.)
> --[[smcv]]
+ >> With git master, the layout and font sizes seem appropriate on my
+ >> Galaxy Nexus (360 CSS pixels wide), and on simulated devices
+ >> 320 device pixels wide (e.g. the first couple of iPhone models)
+ >> in Chromium's developer tools. [[done]]
+ >>
+ >> Test sites:
+ >>
+ >> * <http://actiontabs.hosted.pseudorandom.co.uk/>
+ >> * <http://blueview.hosted.pseudorandom.co.uk/>
+ >> * <http://goldtype.hosted.pseudorandom.co.uk/>
+ >> * <http://monochrome.hosted.pseudorandom.co.uk/>
+ >> * <http://unthemed.hosted.pseudorandom.co.uk/>
+ >>
+ >> If stylistic changes are needed, please open another issue with a
+ >> suggested patch.
+ >> --[[smcv]]
+
* XHTML should be dropped !
> Already in the to-do list: [[todo/generate HTML5 by default]]. --[[smcv]]
+ >> Next version will always generate HTML5, unless someone overrules me.
+ >> --[[smcv]]
+
I'm practicing this on http://dabase.com/ with <http://source.dabase.branchable.com/?p=source.git;a=blob;f=templates/page.tmpl;>
> [[!format diff """
The content is changing frequently without being checked into the git repository. Any ideas?
--[[bastla]]
+
+> Please check your web server logs for any error messages from the CGI.
+> It seems likely that the spammer is editing that page but the changes
+> are somehow not getting committed or pushed.
+>
+> I can't tell you much without knowing details of your setup.
+> For instance, are you using the suggested git repository setup
+> shown in the diagram on the [[rcs/git]] page, or something
+> different? Can you publish a (possibly censored) setup file somewhere?
+>
+> It would probably also be worthwhile to compare the git history of
+> `srcdir/.git` with the git history of the bare repository, if you
+> have one.
+>
+> To recover, you could undo the spam in the `srcdir` (as the user ID
+> that owns the wiki), commit that, and merge with the bare repository
+> if necessary.
+>
+> ----
+>
+> When I tried editing the spammed page to clear it, I got this error:
+>
+> Error: /srv/www/Kurse/AFu-Lizenz/e09.tex independently created, not overwriting with version from Kurse/AFu-Lizenz/e09.tex
+>
+> Your srcdir and destdir seem to have got out of sync. You might need
+> to rebuild the wiki.
+>
+> (I think I'd have received the same error for *any* edit right now.)
+>
+> If you're going to enable completely anonymous editing, I
+> recommend monitoring the wiki more carefully. It might be useful
+> to enable the `syslog` option so that wiki errors go to the
+> system log.
+>
+> --[[smcv]]
--- /dev/null
+ < thm> joeyh: ping
+ < thm> can you update the embedded jquery-ui? (for cve
+ 2010-5312, and/or 2012-6662)
+
+I'll do this next time I spend some time on ikiwiki unless Joey or
+Amitai gets there first.
+
+It doesn't look as though we actually use the vulnerable functionality.
+
+--[[smcv]]
--- /dev/null
+[[!comment format=mdwn
+ username="http://www.globalclue.com/"
+ nickname="Dion"
+ subject="comment 5"
+ date="2014-12-08T17:44:49Z"
+ content="""
+I tried just now and it does work, and it is quite surprising
+"""]]
--- /dev/null
+After enabling "Show sidebar on all pages" (global_sidebars) I can no longer access the setup page via the web interface.
+
+The rest of the wiki continues to work, but only "Content-type: text/html" gets sent for this page.
+
+* Fresh ikiwiki --setup of 3.20141016-1
+* Nginx
+
+Would this be something I'm doing wrong or a bug?
--- /dev/null
+[[!comment format=mdwn
+ username="http://smcv.pseudorandom.co.uk/"
+ nickname="smcv"
+ subject="more info needed"
+ date="2014-11-27T12:14:56Z"
+ content="""
+That sounds like a bug. Please check your web server log for warnings
+or error messages from the CGI script.
+"""]]
* [[mhameed]] `git://github.com/mhameed/ikiwiki.git`
* [[spalax]] `git://github.com/paternal/ikiwiki.git` ([[browse|https://github.com/paternal/ikiwiki]])
* [[jcflack]] `git://github.com/jcflack/ikiwiki.git`
+* [[users/mjd]] `https://github.com/mjdominus/ikiwiki.git` ([[browse|https://github.com/mjdominus/ikiwiki]])
## branches
This command uses two [[templates]] to generate
the pages, `calendarmonth.tmpl` and `calendaryear.tmpl`.
+# [[plugins/calendar]] setup option
+
+Most of what this command does can instead be achieved by enabling the
+`calendar_autocreate` setup option of the [[plugins/calendar]] plugin and
+running `ikiwiki --setup your.setup`. The only thing `ikiwiki-calendar` can
+do that `ikiwiki` cannot is force page generation (using the `-f` switch).
+
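+For example, the relevant setup-file lines might look like this (an
+illustrative sketch; adjust the pagespec to match your site):
+
+    calendar_autocreate => 1,
+    archive_pagespec => "posts/* and !*/Discussion",
+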
# AUTHOR
Joey Hess <joey@ikiwiki.info>
typically use this directive to display a calendar, and also use [[inline]]
to display or list pages created in the given time frame.
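
For example, a blog's front page might embed a monthly calendar with
something like the following (an illustrative sketch; adjust the pagespec to
your own posts):

    \[[!calendar type="month" pages="posts/* and !*/Discussion"]]
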
-The `ikiwiki-calendar` command can be used to automatically generate the
-archive pages. It also refreshes the wiki, updating the calendars to
-highlight the current day. This command is typically run at midnight from
-cron.
+## Generating archive pages
+
+If the `calendar_autocreate` [[!iki plugins/calendar desc=option]] is not set,
+the [[!iki ikiwiki-calendar]] command can be used to automatically generate the
+archive pages. It also refreshes the wiki, updating the calendars to highlight
+the current day. This command is typically run at midnight from cron.
An example crontab:
- 0 0 * * * ikiwiki-calendar ~/ikiwiki.setup "posts/* and !*/Discussion"
+ 0 0 * * * ikiwiki-calendar ~/ikiwiki.setup "posts/* and !*/Discussion"
+
+With the `calendar_autocreate` [[!iki plugins/calendar desc="setup option"]]
+enabled, `ikiwiki` does all of this work itself, so the crontab command can be
+replaced by:
+
+ 0 0 * * * ikiwiki --setup ~/ikiwiki.setup --refresh
## usage
for the whole wiki by setting `archivebase` in ikiwiki's setup file.
Calendars link to pages under here, with names like "2010/04" and
"2010". These pages can be automatically created using the
- `ikiwiki-calendar` program.
+ `calendar_autocreate` [[!iki plugins/calendar desc="setup option"]].
* `year` - The year for which the calendar is requested. Defaults to the
current year. Can also use -1 to refer to last year, and so on.
* `month` - The numeric month for which the calendar is requested, in the
* [L'Altro Wiki](http://laltromondo.dynalias.net/~iki/) Tutorials, reviews, miscellaneus articles in English and Italian.
* gregoa's [p.r. - political rants](http://info.comodo.priv.at/pr/)
* [Michael Hammer](http://www.michael-hammer.at/)
-* [Richardson Family Wiki](http://the4richardsons.com) A wiki, blog or some such nonsense for the family home page or something or other... I will eventually move the rest of my sites to ikiwiki. The source of the site is in git.
* [The personal website of Andrew Back](http://carrierdetect.com)
* [Paul Elms](http://paul.elms.pro) Personal site and blog in russian.
* [Salient Dream](http://www.salientdream.com/) - All Things Strange.
* [KheOps's blog](https://w.ceops.eu/words/)
* [Stig Sandbeck Mathisen](http://fnord.no/) - Personal site and blog, with a bootstrap theme, and varnish frontend.
* Kalle Söderman: [Seen Architecture](http://img.kalleswork.net), [Stockholm Project](http://stockholm.kalleswork.net) - Mainly image galleries using the album and osm plugins with a customized html5 theme.
-
+* [James Technotes](http://jamestechnotes.com), my [wiki](http://jamestechnotes.com) and [blog](http://jamestechnotes.com/blog).
The directive displays a calendar, similar to the typical calendars shown on
some blogs.
-The [[ikiwiki-calendar]] command is used to keep the calendar up-to-date.
+The [[ikiwiki-calendar]] command can be used to force regeneration of the year
+and month pages from their templates (overwriting any existing ones).
+
+## Setup options
+
+* `archivebase` - Default value for [[ikiwiki/directive/calendar]] directive
+ option of the same name.
+* `archive_pagespec` - [[ikiwiki/PageSpec]] of pages to include in the
+ archives, if option `calendar_autocreate` is on. It defaults to `*`.
+* `calendar_autocreate` - Control whether new archive pages are created as
+ needed. It defaults to being done only if option `archivebase` is set.
+* `calendar_fill_gaps` - If set (and `calendar_autocreate` is set as well),
+ build calendar pages for empty years and months too, but never for dates
+ older than the oldest page or newer than the newest page matching the
+ pagespec. If unset, those empty calendar pages are skipped. *Please note:*
+ * The archive pages are not automatically updated when this option changes.
+ It is up to the user to delete the relevant pages and rebuild the wiki.
+ * When `calendar_fill_gaps` is set and a post is deleted, leaving the
+ corresponding year or month empty, its page is kept and shows an empty
+ calendar. This is deliberate, so as not to break any external links pointing
+ to that page. If you do not like it, delete the relevant pages and rebuild
+ the wiki.
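+
+Putting these together, a setup-file fragment that enables autocreated
+archives might look like this (an illustrative sketch; the values simply
+restate the defaults and examples described above):
+
+    archivebase => "archives",
+    archive_pagespec => "posts/* and !*/Discussion",
+    calendar_autocreate => 1,
+    calendar_fill_gaps => 1,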
## CSS
.trailsep {
display: none;
}
+
+/* mobile/small-screen-friendly layout */
+@media (max-width: 600px) {
+ .sidebar {
+ width: auto;
+ float: none;
+ margin-top: 0;
+ border: none;
+ }
+
+ /* if the mobile browser is new enough, use flex layout to shuffle
+ * the sidebar to the end */
+ .page {
+ display: -webkit-box;
+ display: -webkit-flexbox;
+ display: -webkit-flex;
+ display: -moz-box;
+ display: -ms-flexbox;
+ display: flex;
+ -webkit-box-orient: vertical;
+ -webkit-flex-direction: tb;
+ -webkit-flex-direction: column;
+ -webkit-flex-flow: column;
+ -ms-flex-direction: column;
+ flex-direction: column;
+ }
+ #pageheader {
+ -webkit-box-ordinal-group: -1;
+ -webkit-order: -1;
+ -ms-box-ordinal-group: -1;
+ -ms-flex-order: -1;
+ order: -1;
+ }
+ .sidebar, #footer {
+ -webkit-box-ordinal-group: 1;
+ -webkit-order: 1;
+ -ms-box-ordinal-group: 1;
+ -ms-flex-order: 1;
+ order: 1;
+ }
+
+ .blogform, #blogform {
+ padding: 4px 4px;
+ }
+}
* Jon Dowland
* Amitai Schlair
* Luca Capello
+* Patrick ZAJDA
(Note that this page is locked to prevent anyone from tampering with the PayPal link.
If you prefer your donation *not* be listed here, let [[Joey]] know.)
> it ikiwiki. --[[Joey]]
Thanks a lot Joey. :-)
+
+Hi
+
+I installed version 3.20141016 as described, and it works mostly. However to get [[!iki plugins/sparkline]] to work properly I had to add
+
+[[!format php """
+set_include_path(get_include_path() . PATH_SEPARATOR . '/home/private/src/sparkline-php-0.2/lib');
+require_once('/home/private/src/sparkline-php-0.2/lib/Sparkline_$style.php');
+"""]]
+
+to `sparkline.pm` around line 38. This is most likely because
+
+1. I didn't install sparkline into php's default path.
+1. I don't really know php.
+1. I don't have the time (right now) to sort out the proper way to do this.
+
+I also haven't sorted out how to make [[!iki ikiwiki/directive/format]] fully functional. I suspect I don't have everything installed on the host. -- [[james]]
> > ```
> >
> > ... maybe a bug in Discount... --[[anarcat]]
+
+> > I strongly support this feature as well! --[[integrii]]
That seemed to be ok for reviewing [[bugs/CGI wrapper doesn't store PERL5LIB environment variable]], so I hope it's ok for this one too. If another way would be preferable, please let me know.
-- [[jcflack]]
+
+> This is less about what plugins need, and more about what is safe.
+> If an environment variable is unsafe (in the sense of "can make a
+> setuid executable change its behaviour in dangerous ways") then we
+> must not pass it through, however desirable it might be.
+>
+> Because the only safe thing we can do is a whitelist, the list
+> is secondarily about what plugins need: if nothing needs a variable,
+> we don't pass it through.
+>
+> However, if a particular variable is safe, then it's always safe;
+> so if any plugin needs something, we might as well just put it in
+> the big list of things to keep. (In other words, any change to this
+> list is already security-sensitive.)
+>
+> As such, and because importing CGI into Setup pulls in a bunch
+> of extra code that is normally only imported when we are actually
+> running as a CGI, it might make more sense to have the "master list"
+> stay in Wrapper.
+>
+> What variables would `signinview` need? Can we just add them to
+> the list and skip the complexity of per-plugin configurability?
+>
+> Sorting the list makes sense to me, and so does adding the RFC 3875 set.
+>
+> [[!format txt """
+This change does seem to have exposed a thing where various plugins that
+call checksessionexpiry() (in CGI.pm) have been supplying one more argument
+than its prototype allows ... for years ...
+"""]]
+>
+> I fixed that in ikiwiki 3.20141016. Please don't add the extra ignored
+> parameter to the prototype.
+>
+> [[!format diff """
++ if ( $config{needenvkeys} ) {
+"""]]
+>
+> If this is needed at all, you should include this in the master list of
+> setup keys in IkiWiki.pm so it's documentable. Please mention setuid
+> in the description: "environment variables that are safe to pass through
+> a setuid wrapper" or something.
+>
+> I think it's `safe => 0, advanced => 1`.
+>
+> `preserve_env` or `env_keep` (or without the underscore, as you prefer)
+> might be better names for it (terminology stolen from `debuild` and `sudo`
+> respectively).
+>
+> --[[smcv]]
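+>
+> To illustrate, a getsetup entry along the lines described above might look
+> roughly like this (a sketch only: `env_keep` is just one of the suggested
+> names, and the type, example and rebuild values are guesses):
+>
+>     env_keep => {
+>         type => "string",
+>         example => [],
+>         description => "environment variables that are safe to pass through a setuid wrapper",
+>         safe => 0,
+>         advanced => 1,
+>         rebuild => 0,
+>     },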
Here is a patch that makes [[ikiwiki-calendar]] almost useless.
+> [[merged|done]], thanks! --[[smcv]]
+
It adds some options, the main one being `calendar_autocreate`, which is
similar to the `tag_autocreate` option of the [[tag|plugins/tag]] plugin: it
creates archive pages when needed.
branch. I'll apply this at some point if there are no objections.
--[[smcv]]
+
+> [[merged|done]] --[[smcv]]
[[DavidBremner]]
+> I would like this feature too, for the very same reasons.
+>
+> To preserve backward compatibility, I tried to implement it in the following way: if `libdir` is a string, it is (as it is right now) a directory in which plugins can be searched; if `libdir` is an array of strings, it is a list of libdirs. The ideal place to put this would be in subroutine [checkconfig](http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=IkiWiki.pm;hb=56f8223f9594ae687099dada0c138d669a6f931f#l569). However, plugins are loaded (and the `libdir` option is used) in subroutine [loadplugins](http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=IkiWiki.pm;hb=56f8223f9594ae687099dada0c138d669a6f931f#l713), which is called [just before `checkconfig`](http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=ikiwiki.in;hb=729991564ec7e1116fc023c51e73b47af8b6fce7#l143).
+>
+> A solution would be to check `libdir` (and turn it into a list if necessary) somewhere in subroutine [getconfig](http://source.ikiwiki.branchable.com/?p=source.git;a=blob;f=ikiwiki.in;hb=729991564ec7e1116fc023c51e73b47af8b6fce7#l26), but I do not know where to put it not to make it look like a bad hack…
+>
+> Any idea about the best place to preprocess `libdir`? Or any better idea to implement this?
+>
+> [[Louis|spalax]]
+
+>> Modifying `getconfig` is not a valid solution, because IkiWiki.pm is also imported by
+>> [[ikiwiki-transition]], [[ikiwiki-calendar]], the regression tests, etc.
+>>
+>> The way I would personally do it is to have a new non-exported function `getlibdirs`
+>> or something, have it do something like this:
+>>
+>> if (! ref $config{libdir}) {
+>> if (length $config{libdir}) {
+>> $config{libdir} = [$config{libdir}];
+>> } else {
+>> $config{libdir} = [];
+>> }
+>> }
+>> return @{$config{libdir}};
+>>
+>> and replace all uses of $config{libdir} with it.
+>>
+>> --[[smcv]]
+>>>
+>>> I implemented it (see branch ``paternal/libdirs``). I used [[smcv]]'s idea, but
+>>> avoiding side effects. I edited documentation as well. As usual, as neither
+>>> English nor Perl are my first languages (damn! I would be so much more
+>>> efficient in Python) feel free to improve my [[!taglink patch]].
+>>> [[!template id=gitbranch branch=spalax/paternal/libdirs browse="https://github.com/paternal/ikiwiki/tree/paternal/libdirs" author="[[Louis|spalax]]"]]
+
+
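+A side-effect-free variant of the suggestion above might look roughly like
+this (an illustrative sketch only; the `paternal/libdirs` branch linked above
+is the authoritative version):
+
+    sub getlibdirs () {
+        if (! ref $config{libdir}) {
+            return length $config{libdir} ? ($config{libdir}) : ();
+        }
+        return @{$config{libdir}};
+    }
+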
[[!taglink wishlist]]
--- /dev/null
+Runs ikiwiki on his home page at [[http://jamestechnotes.com]] and can be reached at [[james@jamestechnotes.com]]
--- /dev/null
+Mark Jason Dominus / `yrlnry` (on IRC) / [[Ikiwiki git repo|https://github.com/mjdominus/ikiwiki]]
+
+* [[blog|http://blog.plover.com/]]
+* [[CPAN|https://metacpan.org/author/MJD]]
+* [[math.stackexchange|http://math.stackexchange.com/users/25554/mjd]]
my $dir=IkiWiki::dirname($pagefile);
$dir="." unless length $dir;
my $page=IkiWiki::basename($pagefile);
-$page=~s/\.[^.]+$//;
+if (! -d $pagefile) {
+ $page=~s/\.[^.]+$//;
+}
IkiWiki::Plugin::comments::checkconfig();
my $comment_num=1 + IkiWiki::Plugin::comments::num_comments($page, $dir);
+++ /dev/null
-#!/usr/bin/perl
-use warnings;
-use strict;
-use Test::More;
-
-my @pages;
-
-BEGIN {
- @pages=qw(index features news plugins/map security);
- if (system("command -v validate >/dev/null") != 0) {
- plan skip_all => "html validator not present";
- }
- else {
- plan(tests => int @pages + 2);
- }
- use_ok("IkiWiki");
-}
-
-# Have to build the html pages first.
-# Note that just building them like this doesn't exersise all the possible
-# html that can be generated, in particular it misses some of the action
-# links at the top, etc.
-ok(system("make >/dev/null") == 0);
-
-foreach my $page (@pages) {
- print "# Validating $page\n";
- ok(system("validate html/$page.html") == 0);
-}
-
-# TODO: validate form output html
ok(! system("cp t/img/redsquare.png t/tmp/in/redsquare.png"));
if ($SVGS_WORK) {
- writefile("emptysquare.svg", "t/tmp/in", '<svg width="30" height="30"/>');
+ writefile("emptysquare.svg", "t/tmp/in",
+ '<svg width="30" height="30"><rect x="0" y="0" width="30" height="30" fill="blue"/></svg>');
}
# using different image sizes for different pages, so the pagenumber selection can be tested easily
$w3mmodeline
cgi_wrapper: t/tmp/ikiwiki.cgi
cgi_wrappermode: 0754
-html5: $args{html5}
# make it easier to test previewing
add_plugins:
- anonok
sub test_site1_perfectly_ordinary_ikiwiki {
write_setup_file(
- html5 => 0,
url => "http://example.com/wiki/",
cgiurl => "http://example.com/cgi-bin/ikiwiki.cgi",
);
# url and cgiurl are on the same host so the cgiurl is host-relative
check_generated_content(qr{<a[^>]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"});
my %bits = parse_cgi_content(run_cgi());
- is($bits{basehref}, "http://example.com/wiki/");
+ like($bits{basehref}, qr{^(?:(?:http:)?//example\.com)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:http:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:http:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
# when accessed via HTTPS, links are secure
%bits = parse_cgi_content(run_cgi(is_https => 1));
- is($bits{basehref}, "https://example.com/wiki/");
+ like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
# when accessed via a different hostname, links stay on that host
%bits = parse_cgi_content(run_cgi(HTTP_HOST => 'staging.example.net'));
- is($bits{basehref}, "http://staging.example.net/wiki/");
+ like($bits{basehref}, qr{^(?:(?:http:)?//staging\.example\.net)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:http:)?//staging.example.net)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:http:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$});
# previewing a page
%bits = parse_cgi_content(run_cgi(is_preview => 1));
- is($bits{basehref}, "http://example.com/wiki/a/b/c/");
+ like($bits{basehref}, qr{^(?:(?:http:)?//example\.com)?/wiki/a/b/c/$});
like($bits{stylehref}, qr{^(?:(?:http:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
like($bits{cgihref}, qr{^(?:(?:http:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
-
- # in html5, the <base> is allowed to be relative, and we take full
- # advantage of that
- write_setup_file(
- html5 => 1,
- url => "http://example.com/wiki/",
- cgiurl => "http://example.com/cgi-bin/ikiwiki.cgi",
- );
- thoroughly_rebuild();
- check_cgi_mode_bits();
- # url and cgiurl are on the same host so the cgiurl is host-relative
- check_generated_content(qr{<a[^>]+href="/cgi-bin/ikiwiki.cgi\?do=prefs"});
-
- %bits = parse_cgi_content(run_cgi());
- is($bits{basehref}, "/wiki/");
- is($bits{stylehref}, "/wiki/style.css");
- is($bits{tophref}, "/wiki/");
- is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
-
- # when accessed via HTTPS, links are secure - this is easy because under
- # html5 they're independent of the URL at which the CGI was accessed
- %bits = parse_cgi_content(run_cgi(is_https => 1));
- is($bits{basehref}, "/wiki/");
- is($bits{stylehref}, "/wiki/style.css");
- is($bits{tophref}, "/wiki/");
- is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
-
- # when accessed via a different hostname, links stay on that host -
- # this is really easy in html5 because we can use relative URLs
- %bits = parse_cgi_content(run_cgi(HTTP_HOST => 'staging.example.net'));
- is($bits{basehref}, "/wiki/");
- is($bits{stylehref}, "/wiki/style.css");
- is($bits{tophref}, "/wiki/");
- is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
-
- # previewing a page
- %bits = parse_cgi_content(run_cgi(is_preview => 1));
- is($bits{basehref}, "/wiki/a/b/c/");
- is($bits{stylehref}, "/wiki/style.css");
- like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
- is($bits{cgihref}, "/cgi-bin/ikiwiki.cgi");
}
sub test_site2_static_content_and_cgi_on_different_servers {
write_setup_file(
- html5 => 0,
url => "http://static.example.com/",
cgiurl => "http://cgi.example.com/ikiwiki.cgi",
);
check_generated_content(qr{<a[^>]+href="(?:http:)?//cgi.example.com/ikiwiki.cgi\?do=prefs"});
my %bits = parse_cgi_content(run_cgi(SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
- like($bits{basehref}, qr{^http://static.example.com/$});
+ like($bits{basehref}, qr{^(?:(?:http:)?//static.example.com)?/$});
like($bits{stylehref}, qr{^(?:(?:http:)?//static.example.com)?/style.css$});
like($bits{tophref}, qr{^(?:http:)?//static.example.com/$});
like($bits{cgihref}, qr{^(?:(?:http:)?//cgi.example.com)?/ikiwiki.cgi$});
# when accessed via HTTPS, links are secure
%bits = parse_cgi_content(run_cgi(is_https => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
- like($bits{basehref}, qr{^https://static.example.com/$});
+ like($bits{basehref}, qr{^(?:https:)?//static\.example\.com/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//static.example.com)?/style.css$});
like($bits{tophref}, qr{^(?:https:)?//static.example.com/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//cgi.example.com)?/ikiwiki.cgi$});
# when accessed via a different hostname, links to the CGI (only) should
# stay on that host?
%bits = parse_cgi_content(run_cgi(is_preview => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'staging.example.net'));
- like($bits{basehref}, qr{^http://static.example.com/a/b/c/$});
+ like($bits{basehref}, qr{^(?:http:)?//static\.example\.com/a/b/c/$});
like($bits{stylehref}, qr{^(?:(?:http:)?//static.example.com|\.\./\.\./\.\.)/style.css$});
like($bits{tophref}, qr{^(?:(?:http:)?//static.example.com|\.\./\.\./\.\.)/$});
like($bits{cgihref}, qr{^(?:(?:http:)?//(?:staging\.example\.net|cgi\.example\.com))?/ikiwiki.cgi$});
local $TODO = "use self-referential CGI URL?";
like($bits{cgihref}, qr{^(?:(?:http:)?//staging.example.net)?/ikiwiki.cgi$});
}
-
- write_setup_file(
- html5 => 1,
- url => "http://static.example.com/",
- cgiurl => "http://cgi.example.com/ikiwiki.cgi",
- );
- thoroughly_rebuild();
- check_cgi_mode_bits();
- # url and cgiurl are not on the same host so the cgiurl has to be
- # protocol-relative or absolute
- check_generated_content(qr{<a[^>]+href="(?:http:)?//cgi.example.com/ikiwiki.cgi\?do=prefs"});
-
- %bits = parse_cgi_content(run_cgi(SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
- is($bits{basehref}, "//static.example.com/");
- is($bits{stylehref}, "//static.example.com/style.css");
- is($bits{tophref}, "//static.example.com/");
- is($bits{cgihref}, "//cgi.example.com/ikiwiki.cgi");
-
- # when accessed via HTTPS, links are secure - in fact they're exactly the
- # same as when accessed via HTTP
- %bits = parse_cgi_content(run_cgi(is_https => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'cgi.example.com'));
- is($bits{basehref}, "//static.example.com/");
- is($bits{stylehref}, "//static.example.com/style.css");
- is($bits{tophref}, "//static.example.com/");
- is($bits{cgihref}, "//cgi.example.com/ikiwiki.cgi");
-
- # when accessed via a different hostname, links to the CGI (only) should
- # stay on that host?
- %bits = parse_cgi_content(run_cgi(is_preview => 1, SCRIPT_NAME => '/ikiwiki.cgi', HTTP_HOST => 'staging.example.net'));
- is($bits{basehref}, "//static.example.com/a/b/c/");
- is($bits{stylehref}, "//static.example.com/style.css");
- is($bits{tophref}, "../../../");
- like($bits{cgihref}, qr{//(?:staging\.example\.net|cgi\.example\.com)/ikiwiki\.cgi});
- TODO: {
- local $TODO = "use self-referential CGI URL maybe?";
- is($bits{cgihref}, "//staging.example.net/ikiwiki.cgi");
- }
}
sub test_site3_we_specifically_want_everything_to_be_secure {
write_setup_file(
- html5 => 0,
url => "https://example.com/wiki/",
cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
);
# when accessed via HTTPS, links are secure
my %bits = parse_cgi_content(run_cgi(is_https => 1));
- is($bits{basehref}, "https://example.com/wiki/");
+ like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
# when accessed via a different hostname, links stay on that host
%bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net'));
- is($bits{basehref}, "https://staging.example.net/wiki/");
+ like($bits{basehref}, qr{^(?:(?:https:)?//staging\.example\.net)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//staging.example.net)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$});
# previewing a page
%bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1));
- is($bits{basehref}, "https://example.com/wiki/a/b/c/");
+ like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/a/b/c/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
-
- # not testing html5: 0 here because that ends up identical to site 1
}
sub test_site4_cgi_is_secure_static_content_doesnt_have_to_be {
# (NetBSD wiki)
write_setup_file(
- html5 => 0,
url => "http://example.com/wiki/",
cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
);
# when accessed via HTTPS, links are secure (to avoid mixed-content)
my %bits = parse_cgi_content(run_cgi(is_https => 1));
- is($bits{basehref}, "https://example.com/wiki/");
+ like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
- # when not accessed via HTTPS, ???
+ # FIXME: when not accessed via HTTPS, should the static content be
+ # forced to https anyway? For now we accept either
%bits = parse_cgi_content(run_cgi());
- like($bits{basehref}, qr{^https?://example.com/wiki/$});
+ like($bits{basehref}, qr{^(?:(?:https?)?://example\.com)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:https?:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:(?:https?://example.com)?/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
%bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net'));
# because the static and dynamic stuff is on the same server, we assume that
# both are also on the staging server
- like($bits{basehref}, qr{^https://staging.example.net/wiki/$});
+ like($bits{basehref}, qr{^(?:(?:https:)?//staging\.example\.net)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//staging.example.net)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:(?:(?:https:)?//staging.example.net)?/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//(?:staging\.example\.net|example\.com))?/cgi-bin/ikiwiki.cgi$});
# previewing a page
%bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1));
- is($bits{basehref}, "https://example.com/wiki/a/b/c/");
+ like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/a/b/c/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
-
- write_setup_file(
- html5 => 1,
- url => "http://example.com/wiki/",
- cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
- );
- thoroughly_rebuild();
- check_cgi_mode_bits();
- # url and cgiurl are on the same host but different schemes
- check_generated_content(qr{<a[^>]+href="https://example.com/cgi-bin/ikiwiki.cgi\?do=prefs"});
-
- # when accessed via HTTPS, links are secure (to avoid mixed-content)
- %bits = parse_cgi_content(run_cgi(is_https => 1));
- is($bits{basehref}, "/wiki/");
- is($bits{stylehref}, "/wiki/style.css");
- is($bits{tophref}, "/wiki/");
- like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
-
- # when not accessed via HTTPS, ???
- %bits = parse_cgi_content(run_cgi());
- like($bits{basehref}, qr{^(?:https?://example.com)?/wiki/$});
- like($bits{stylehref}, qr{^(?:(?:https?:)?//example.com)?/wiki/style.css$});
- like($bits{tophref}, qr{^(?:(?:https?://example.com)?/wiki|\.)/$});
- like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
-
- # when accessed via a different hostname, links stay on that host
- %bits = parse_cgi_content(run_cgi(is_https => 1, HTTP_HOST => 'staging.example.net'));
- # because the static and dynamic stuff is on the same server, we assume that
- # both are also on the staging server
- is($bits{basehref}, "/wiki/");
- is($bits{stylehref}, "/wiki/style.css");
- like($bits{tophref}, qr{^(?:/wiki|\.)/$});
- like($bits{cgihref}, qr{^(?:(?:https:)?//(?:example\.com|staging\.example\.net))?/cgi-bin/ikiwiki.cgi$});
- TODO: {
- local $TODO = "this should really point back to itself but currently points to example.com";
- like($bits{cgihref}, qr{^(?:(?:https:)?//staging.example.net)?/cgi-bin/ikiwiki.cgi$});
- }
-
- # previewing a page
- %bits = parse_cgi_content(run_cgi(is_preview => 1, is_https => 1));
- is($bits{basehref}, "/wiki/a/b/c/");
- is($bits{stylehref}, "/wiki/style.css");
- like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
- like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
-
- # Deliberately not testing https static content with http cgiurl,
- # because that makes remarkably little sense.
}
sub test_site5_w3mmode {
# as documented in [[w3mmode]]
write_setup_file(
- html5 => 0,
url => undef,
cgiurl => "ikiwiki.cgi",
w3mmode => 1,
like($bits{cgihref}, qr{^(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi$});
like($bits{basehref}, qr{^(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out/$});
like($bits{stylehref}, qr{^(?:(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out|\.)/style.css$});
-
- write_setup_file(
- html5 => 1,
- url => undef,
- cgiurl => "ikiwiki.cgi",
- w3mmode => 1,
- );
- thoroughly_rebuild();
- check_cgi_mode_bits();
- # FIXME: does /$LIB/ikiwiki-w3m.cgi work under w3m?
- check_generated_content(qr{<a[^>]+href="(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi\?do=prefs"});
-
- %bits = parse_cgi_content(run_cgi(PATH_INFO => '/ikiwiki.cgi', SCRIPT_NAME => '/cgi-bin/ikiwiki-w3m.cgi'));
- like($bits{tophref}, qr{^(?:\Q$pwd\E/t/tmp/out|\.)/$});
- like($bits{cgihref}, qr{^(?:file://)?/\$LIB/ikiwiki-w3m.cgi/ikiwiki.cgi$});
- like($bits{basehref}, qr{^(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out/$});
- like($bits{stylehref}, qr{^(?:(?:(?:file:)?//)?\Q$pwd\E/t/tmp/out|\.)/style.css$});
}
sub test_site6_behind_reverse_proxy {
write_setup_file(
- html5 => 0,
url => "https://example.com/wiki/",
cgiurl => "https://example.com/cgi-bin/ikiwiki.cgi",
reverse_proxy => 1,
my %bits = parse_cgi_content(run_cgi(HTTP_HOST => 'localhost'));
like($bits{tophref}, qr{^(?:/wiki|\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
- is($bits{basehref}, "https://example.com/wiki/");
+ like($bits{basehref}, qr{^(?:(?:https:)?//example\.com)?/wiki/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
# previewing a page
%bits = parse_cgi_content(run_cgi(is_preview => 1, HTTP_HOST => 'localhost'));
like($bits{tophref}, qr{^(?:/wiki|\.\./\.\./\.\.)/$});
like($bits{cgihref}, qr{^(?:(?:https:)?//example.com)?/cgi-bin/ikiwiki.cgi$});
- is($bits{basehref}, "https://example.com/wiki/a/b/c/");
+ like($bits{basehref}, qr{^(?:(?:https)?://example\.com)?/wiki/a/b/c/$});
like($bits{stylehref}, qr{^(?:(?:https:)?//example.com)?/wiki/style.css$});
-
- # not testing html5: 1 because it would be the same as site 1 -
- # the reverse_proxy config option is unnecessary under html5
}
test_startup();
--- /dev/null
+#!/usr/bin/perl
+use warnings;
+use strict;
+use Cwd qw();
+use File::Find;
+use Test::More;
+
+plan(skip_all => "XML::Parser not available")
+ unless eval q{use XML::Parser (); 1;};
+
+use IkiWiki;
+
+ok(system("make >/dev/null") == 0);
+
+chdir("html") || die "chdir: $!";
+
+sub wanted {
+ my $file = $_;
+ return if -d $file;
+ $file =~ s{^\./}{};
+ return if $file !~ m/\.html$/;
+ if (eval {
+ XML::Parser->new()->parsefile($file);
+ 1;
+ }) {
+ pass($file);
+ }
+ elsif ($file =~ m{^(?:
+ # user-contributed, contains explicit <br>
+ plugins/contrib/gallery |
+ # use templatebody when branchable.com has been upgraded
+ templates/ |
+ # malformed content in <pre> not escaped by discount
+ tips/convert_mediawiki_to_ikiwiki |
+ # user-contributed, content is anyone's guess
+ users/
+ )}x) {
+ TODO: {
+ local $TODO = $@;
+ fail($file);
+ }
+ }
+}
+
+find({
+ no_chdir => 1,
+ wanted => \&wanted,
+}, '.');
+
+done_testing;
<div>
<input name="sitesearch" value="<TMPL_VAR URL>" type="hidden" />
<input name="q" value="" id="searchbox" size="16" maxlength="255" type="text"
- <TMPL_IF HTML5>placeholder="search"</TMPL_IF> />
+ placeholder="search" />
</div>
</form>
-<TMPL_IF HTML5><!DOCTYPE html>
-<html>
-<TMPL_ELSE><!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
- "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
+<!DOCTYPE html>
<html xmlns="http://www.w3.org/1999/xhtml">
-</TMPL_IF>
<head>
<TMPL_IF DYNAMIC>
<TMPL_IF FORCEBASEURL><base href="<TMPL_VAR FORCEBASEURL>" /><TMPL_ELSE>
</TMPL_IF>
<TMPL_IF HTML5><meta charset="utf-8" /><TMPL_ELSE><meta http-equiv="Content-Type" content="text/html; charset=utf-8" /></TMPL_IF>
<title><TMPL_VAR TITLE></title>
+<TMPL_IF RESPONSIVE_LAYOUT><meta name="viewport" content="width=device-width, initial-scale=1" /></TMPL_IF>
<TMPL_IF FAVICON>
<link rel="icon" href="<TMPL_VAR BASEURL><TMPL_VAR FAVICON>" type="image/x-icon" />
</TMPL_IF>
<div id="pagebody">
-<TMPL_IF HTML5><section id="content" role="main"><TMPL_ELSE><div id="content"></TMPL_IF>
+<TMPL_IF HTML5><section<TMPL_ELSE><div</TMPL_IF> id="content" role="main">
<TMPL_VAR CONTENT>
<TMPL_IF HTML5></section><TMPL_ELSE></div></TMPL_IF>
<TMPL_UNLESS DYNAMIC>
<TMPL_IF COMMENTS>
-<TMPL_IF HTML5><section id="comments" role="complementary"><TMPL_ELSE><div id="comments"></TMPL_IF>
+<TMPL_IF HTML5><section<TMPL_ELSE><div</TMPL_IF> id="comments" role="complementary">
<TMPL_VAR COMMENTS>
<TMPL_IF ADDCOMMENTURL>
<div class="addcomment">
</div>
-<TMPL_IF HTML5><footer id="footer" class="pagefooter" role="contentinfo"><TMPL_ELSE><div id="footer" class="pagefooter"></TMPL_IF>
+<TMPL_IF HTML5><footer<TMPL_ELSE><div</TMPL_IF> id="footer" class="pagefooter" role="contentinfo">
<TMPL_UNLESS DYNAMIC>
<TMPL_IF HTML5><nav id="pageinfo"><TMPL_ELSE><div id="pageinfo"></TMPL_IF>
<form method="get" action="<TMPL_VAR SEARCHACTION>" id="searchform">
<div>
<input type="text" id="searchbox" name="P" value="" size="16"
-<TMPL_IF HTML5>placeholder="search"</TMPL_IF> />
+ placeholder="search" />
</div>
</form>
<FORM NAME=P METHOD=GET
ACTION="$html{$env{CGIURL}}" TARGET="_top">
-<div style="text-align:center">
-<INPUT NAME=P VALUE="$html{$query}" SIZE=65>
+<div class="searchquery" style="text-align:center">
+<INPUT class="searchbox" NAME=P VALUE="$html{$query}" SIZE=65>
<INPUT TYPE=SUBMIT VALUE="Search">
$env{HELPLINK}
<hr>
/* allow space for the action tabs */
margin-bottom: 2em;
}
+
+@media (max-width: 600px) {
+ #content, #enclosure, #comments, #footer {
+ margin: 0.5em;
+ }
+ .pageheader .actions ul li {
+ padding: .1em .2em 0 .2em;
+ font-size: 0.8em;
+ }
+}
}
+/* lose the border on mobile */
+@media (max-width: 600px) {
+ body {
+ padding: 0;
+ }
+ .page {
+ border: none;
+ margin: 0;
+ }
+ #pagebody {
+ margin: auto;
+ border: none;
+ padding: 0.5em;
+ }
+ #footer {
+ padding: 0.5em;
+ }
+}
+
+/* cancel the minimum width if it would mean scrollbars */
+@media (max-width: 850px) {
+ .page {
+ width: auto;
+ min-width: 0;
+ padding: 0;
+ }
+}
margin-left: auto;
margin-right: auto;
width: 48em;
+ max-width: 95%;
background: url(gradient.png) repeat-x white 0px -16px;
margin-top: 48px; /* height of gradient.png that we want to see */