diff --git a/IkiWiki/Plugin/table.pm b/IkiWiki/Plugin/table.pm
index 26e434b1fa2264fdb9c24ceb8031775bf49f64fe..11474c8f08412c701a58f60ce350888f84a1057a 100644
--- a/IkiWiki/Plugin/table.pm
+++ b/IkiWiki/Plugin/table.pm
@@ -3,9 +3,8 @@ package IkiWiki::Plugin::table;
 
 use warnings;
 use strict;
-
-use IkiWiki;
-use IkiWiki::Plugin::mdwn;
+use Encode;
+use IkiWiki 2.00;
 
 sub import { #{{{
        hook(type => "preprocess", id => "table", call => \&preprocess);
@@ -23,6 +22,7 @@ sub preprocess (@) { #{{{
                        return "[[table ".gettext("cannot find file")."]]";
                }
                $params{data} = readfile(srcfile($params{file}));
+               add_depends($params{page}, $params{file});
        }
 
        if (lc $params{format} eq 'auto') {
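
The add_depends() call added above is what makes a table sourced from file= rebuild when that data file is edited; without it, the embedding page would only be regenerated when the page itself changed. Below is a minimal sketch of the same dependency pattern in a standalone plugin, assuming the IkiWiki 2.00 plugin API used in this diff; the plugin name "example" and its behaviour are hypothetical and not part of table.pm.

    package IkiWiki::Plugin::example;

    use warnings;
    use strict;
    use IkiWiki 2.00;

    sub import {
        hook(type => "preprocess", id => "example", call => \&preprocess);
    }

    sub preprocess (@) {
        my %params=@_;
        return "" unless exists $params{file};
        # hypothetical: re-render the embedding page whenever the data file changes
        add_depends($params{page}, $params{file});
        my $data=readfile(srcfile($params{file}));
        return length($data)." bytes";
    }

    1
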
@@ -37,10 +37,28 @@ sub preprocess (@) { #{{{
 
        my @data;
        if (lc $params{format} eq 'csv') {
-               @data=split_csv($params{data}, $params{delimiter});
+               @data=split_csv($params{data},
+                       defined $params{delimiter} ? $params{delimiter} : ",",);
+               # linkify after parsing since html link quoting can
+               # confuse CSV parsing
+               if (! exists $params{file}) {
+                       @data=map {
+                               [ map {
+                                       IkiWiki::linkify($params{page},
+                                               $params{destpage}, $_);
+                               } @$_ ]
+                       } @data;
+               }
        }
        elsif (lc $params{format} eq 'dsv') {
-               @data=split_dsv($params{data}, $params{delimiter});
+               # linkify before parsing since wikilinks can contain the
+               # delimiter
+               if (! exists $params{file}) {
+                       $params{data} = IkiWiki::linkify($params{page},
+                               $params{destpage}, $params{data});
+               }
+               @data=split_dsv($params{data},
+                       defined $params{delimiter} ? $params{delimiter} : "|",);
        }
        else {
                return "[[table ".gettext("unknown data format")."]]";
@@ -58,23 +76,12 @@ sub preprocess (@) { #{{{
        push @lines, defined $params{class}
                        ? "<table class=\"".$params{class}.'">'
                        : '<table>';
-       push @lines, "\t<thead>","\t\t<tr>",
-               (map {
-                       "\t\t\t<th>".
-                       htmlize($params{page}, $params{destpage}, $_).
-                       "</th>"
-               } @$header),
-               "\t\t</tr>", "\t</thead>" if defined $header;
-       push @lines, "\t<tbody>";
-       foreach my $record (@data) {
-               push @lines, "\t\t<tr>",
-                       (map {
-                               "\t\t\t<td>".
-                               htmlize($params{page}, $params{destpage}, $_).
-                               "</td>"
-                       } @$record),
-                       "\t\t</tr>";
-       }
+       push @lines, "\t<thead>",
+               genrow($params{page}, $params{destpage}, "th", @$header),
+               "\t</thead>" if defined $header;
+       push @lines, "\t<tbody>" if defined $header;
+       push @lines, genrow($params{page}, $params{destpage}, "td", @$_)
+               foreach @data;
        push @lines, "\t</tbody>" if defined $header;
        push @lines, '</table>';
        my $html = join("\n", @lines);
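
The hunk above replaces two near-identical map blocks with calls to a shared genrow() helper (defined further down) and only wraps the rows in <thead>/<tbody> when a header row exists. The following is a standalone sketch of the same assembly pattern with htmlize() left out; genrow_demo and the sample data are hypothetical, not the plugin's genrow().

    use strict;
    use warnings;

    sub genrow_demo {
        my ($elt, @cells) = @_;
        return "\t\t<tr>",
            (map { "\t\t\t<$elt>$_</$elt>" } @cells),
            "\t\t</tr>";
    }

    my @header = ("Name", "Age");
    my @data   = (["Alice", "30"], ["Bob", "25"]);

    my @lines = ('<table>');
    push @lines, "\t<thead>", genrow_demo("th", @header), "\t</thead>", "\t<tbody>";
    push @lines, genrow_demo("td", @$_) foreach @data;
    push @lines, "\t</tbody>", '</table>';
    print join("\n", @lines), "\n";
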
@@ -103,8 +110,9 @@ sub split_csv ($$) { #{{{
        eval q{use Text::CSV};
        error($@) if $@;
        my $csv = Text::CSV->new({ 
-               sep_char        => defined $delimiter ? $delimiter : ",",
+               sep_char        => $delimiter,
                binary          => 1,
+               allow_loose_quotes => 1,
        }) || error("could not create a Text::CSV object");
        
        my $l=0;
@@ -112,7 +120,7 @@ sub split_csv ($$) { #{{{
        foreach my $line (@text_lines) {
                $l++;
                if ($csv->parse($line)) {
-                       push(@data, [ $csv->fields() ]);
+                       push(@data, [ map { decode_utf8 $_ } $csv->fields() ]);
                }
                else {
                        debug(sprintf(gettext('parse fail at line %d: %s'), 
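
The two hunks above switch split_csv() to a fixed sep_char (the caller now supplies the default), turn on allow_loose_quotes, and decode each parsed field from UTF-8. Below is a standalone sketch of the same Text::CSV settings on a hypothetical input line; it is not taken from the plugin.

    use strict;
    use warnings;
    use Encode;
    use Text::CSV;

    my $csv = Text::CSV->new({
        sep_char           => ",",
        binary             => 1,    # permit bytes outside plain ASCII in fields
        allow_loose_quotes => 1,    # tolerate stray quote characters in fields
    }) || die "could not create a Text::CSV object";

    my $line = encode_utf8(qq{caf\x{e9},"say ""hi""",3});
    if ($csv->parse($line)) {
        # fields() returns bytes; decode them back to Perl characters
        my @fields = map { decode_utf8 $_ } $csv->fields();
        print join(" | ", @fields), "\n";    # café | say "hi" | 3
    }
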
@@ -130,26 +138,45 @@ sub split_dsv ($$) { #{{{
 
        my @data;
        foreach my $line (@text_lines) {
-               push @data, [ split(/\Q$delimiter\E/, $line) ];
+               push @data, [ split(/\Q$delimiter\E/, $line, -1) ];
        }
     
        return @data;
 } #}}}
 
-sub htmlize ($$$){ #{{{
+sub genrow ($$$@) { #{{{
        my $page = shift;
        my $destpage = shift;
-       my $text = shift;
+       my $elt = shift;
+       my @data = @_;
+
+       my @ret;
+       push @ret, "\t\t<tr>";
+       for (my $x=0; $x < @data; $x++) {
+               my $cell=htmlize($page, $destpage, $data[$x]);
+               my $colspan=1;
+               while ($x+1 < @data && $data[$x+1] eq '') {
+                       $x++;
+                       $colspan++;
+               }
+               if ($colspan > 1) {
+                       push @ret, "\t\t\t<$elt colspan=\"$colspan\">$cell</$elt>"
+               }
+               else {
+                       push @ret, "\t\t\t<$elt>$cell</$elt>"
+               }
+       }
+       push @ret, "\t\t</tr>";
 
-       $text=IkiWiki::htmlize($page, pagetype($pagesources{$page}),
-               IkiWiki::preprocess($page, $destpage, $text));
+       return @ret;
+} #}}}
 
-       # hack to get rid of enclosing junk added by markdown
-       $text=~s!^<p>!!;
-       $text=~s!</p>$!!;
-       chomp $text;
+sub htmlize ($$$) { #{{{
+       my $page = shift;
+       my $destpage = shift;
 
-       return $text;
+       return IkiWiki::htmlize($page, $destpage, pagetype($pagesources{$page}),
+               IkiWiki::preprocess($page, $destpage, shift));
 }
 
 1
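
Two behaviour changes in the last hunk are easy to miss: split_dsv() now passes a limit of -1 so trailing empty fields survive, and genrow() folds runs of empty cells into a colspan. A hypothetical row illustrating both follows; it is not part of the plugin.

    use strict;
    use warnings;

    my @cells = split(/\Q|\E/, 'a|b||', -1);
    # @cells is ("a", "b", "", ""): four cells, the trailing empties kept.
    # Without the -1 limit, split would drop them and the row would come out short.
    print scalar(@cells), " cells\n";    # prints "4 cells"

    # In genrow(), a cell followed by empty cells spans them instead, so a row of
    # ("wide cell", "", "last") renders roughly as:
    #     <td colspan="2">wide cell</td>
    #     <td>last</td>
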