Oops, use the more recent (and less brittle) directory test.

diff --git a/IkiWiki/Plugin/search.pm b/IkiWiki/Plugin/search.pm
index 0d4a8f552abb9bf7f38d7e1112e1278308df1353..393c17e0fe8cf6b9ec137ba7cfc8ac64e008cd4b 100644
--- a/IkiWiki/Plugin/search.pm
+++ b/IkiWiki/Plugin/search.pm
@@ -4,42 +4,46 @@ package IkiWiki::Plugin::search;
 
 use warnings;
 use strict;
-use IkiWiki 2.00;
+use IkiWiki 3.00;
 
-sub import { #{{{
+sub import {
+       hook(type => "getsetup", id => "search", call => \&getsetup);
        hook(type => "checkconfig", id => "search", call => \&checkconfig);
        hook(type => "pagetemplate", id => "search", call => \&pagetemplate);
-       # run last so other needsbuild hooks can modify the list
-       hook(type => "needsbuild", id => "search", call => \&needsbuild,
-               last => 1);
-       hook(type => "filter", id => "search", call => \&filter);
+       hook(type => "postscan", id => "search", call => \&index);
        hook(type => "delete", id => "search", call => \&delete);
        hook(type => "cgi", id => "search", call => \&cgi);
-} # }}}
-
-sub checkconfig () { #{{{
+}
+
+sub getsetup () {
+       return
+               plugin => {
+                       safe => 1,
+                       rebuild => 1,
+               },
+               omega_cgi => {
+                       type => "string",
+                       example => "/usr/lib/cgi-bin/omega/omega",
+                       description => "path to the omega cgi program",
+                       safe => 0, # external program
+                       rebuild => 0,
+               },
+}
+
+sub checkconfig () {
        foreach my $required (qw(url cgiurl)) {
                if (! length $config{$required}) {
-                       error(sprintf(gettext("Must specify %s when using the search plugin"), $required));
+                       error(sprintf(gettext("Must specify %s when using the %s plugin"), $required, 'search'));
                }
        }
-
-       if (! exists $config{omega_cgi}) {
-               $config{omega_cgi}="/usr/lib/cgi-bin/omega/omega";
-       }
        
-       if (! -e $config{wikistatedir}."/xapian" || $config{rebuild}) {
-               writefile("omega.conf", $config{wikistatedir}."/xapian",
-                       "database_dir .\n".
-                       "template_dir ./templates\n");
-               writefile("query", $config{wikistatedir}."/xapian/templates",
-                       IkiWiki::misctemplate(gettext("search"),
-                               readfile(IkiWiki::template_file("searchquery.tmpl"))));
+       if (! defined $config{omega_cgi}) {
+               $config{omega_cgi}="/usr/lib/cgi-bin/omega/omega";
        }
-} #}}}
+}
 
 my $form;
-sub pagetemplate (@) { #{{{
+sub pagetemplate (@) {
        my %params=@_;
        my $page=$params{page};
        my $template=$params{template};
@@ -54,83 +58,105 @@ sub pagetemplate (@) { #{{{
 
                $template->param(searchform => $form);
        }
-} #}}}
-
-my %toindex;
-sub needsbuild ($) { #{{{
-       %toindex = map { pagename($_) => 1 } @{shift()};
-} #}}}
+}
 
 my $scrubber;
-sub filter (@) { #{{{
+my $stemmer;
+sub index (@) {
        my %params=@_;
+
+       setupfiles();
+
+       # A unique pageterm is used to identify the document for a page.
+       my $pageterm=pageterm($params{page});
+       return $params{content} unless defined $pageterm;
        
-       if ($params{page} eq $params{destpage} && $toindex{$params{page}}) {
-               # index page
-               my $db=xapiandb();
-               my $doc=Search::Xapian::Document->new();
-               my $title;
-               if (exists $pagestate{$params{page}}{meta} &&
-                   exists $pagestate{$params{page}}{meta}{title}) {
-                       $title=$pagestate{$params{page}}{meta}{title};
-               }
-               else {
-                       $title=IkiWiki::pagetitle($params{page});
-               }
+       my $db=xapiandb();
+       my $doc=Search::Xapian::Document->new();
+       my $caption=pagetitle($params{page});
+       my $title;
+       if (exists $pagestate{$params{page}}{meta} &&
+               exists $pagestate{$params{page}}{meta}{title}) {
+               $title=$pagestate{$params{page}}{meta}{title};
+       }
+       else {
+               $title=$caption;
+       }
 
 
-               # TODO: This removes html that is in eg, a markdown pre,
-               # which should not be removed.
-               if (! defined $scrubber) {
-                       eval q{use HTML::Scrubber};
-                       error($@) if $@;
+       # Remove html from text to be indexed.
+       if (! defined $scrubber) {
+               eval q{use HTML::Scrubber};
+               if (! $@) {
                        $scrubber=HTML::Scrubber->new(allow => []);
                }
-               my $toindex=$scrubber->scrub($params{content});
-
-               # Take 512 characters for a sample, then extend it out
-               # if it stopped in the middle of a word.
-               my $size=512;
-               my ($sample)=substr($toindex, 0, $size);
-               my $next=substr($toindex, $size++, 1);
-               while ($next !~ /\s/) {
+       }
+       my $toindex = defined $scrubber ? $scrubber->scrub($params{content}) : $params{content};
+       
+       # Take 512 characters for a sample, then extend it out
+       # if it stopped in the middle of a word.
+       my $size=512;
+       my ($sample)=substr($toindex, 0, $size);
+       if (length($sample) == $size) {
+               my $max=length($toindex);
+               my $next;
+               while ($size < $max &&
+                      ($next=substr($toindex, $size++, 1)) !~ /\s/) {
                        $sample.=$next;
-                       $next=substr($toindex, $size++, 1);
                }
-               $sample=~s/\n/ /g;
-
-               # data used by omega
-               $doc->set_data(
-                       "url=".urlto($params{page}, "")."\n".
-                       "sample=$sample\n".
-                       "caption=$title\n".
-                       "modtime=$IkiWiki::pagemtime{$params{page}}\n".
-                       "size=".length($params{content})."\n"
-               );
-
-               my $tg = Search::Xapian::TermGenerator->new();
-               $tg->set_stemmer(new Search::Xapian::Stem("english"));
-               $tg->set_document($doc);
-               $tg->index_text($params{page}, 2);
-               $tg->index_text($title, 2);
-               $tg->index_text($toindex);
-
-               my $pageterm=pageterm($params{page});
-               $doc->add_term($pageterm);
-               $db->replace_document_by_term($pageterm, $doc);
+       }
+       $sample=~s/\n/ /g;
+       
+       # data used by omega
+       # Decode html entities in it, since omega re-encodes them.
+       eval q{use HTML::Entities};
+       $doc->set_data(
+               "url=".urlto($params{page}, "")."\n".
+               "sample=".decode_entities($sample)."\n".
+               "caption=".decode_entities($caption)."\n".
+               "modtime=$IkiWiki::pagemtime{$params{page}}\n".
+               "size=".length($params{content})."\n"
+       );
+
+       # Index document and add terms for other metadata.
+       my $tg = Search::Xapian::TermGenerator->new();
+       if (! $stemmer) {
+               my $langcode=$ENV{LANG} || "en";
+               $langcode=~s/_.*//;
+
+               # This whitelist is here to work around a xapian bug (#486138)
+               my @whitelist=qw{da de en es fi fr hu it no pt ru ro sv tr};
+
+               if (grep { $_ eq $langcode } @whitelist) {
+                       $stemmer=Search::Xapian::Stem->new($langcode);
+               }
+               else {
+                       $stemmer=Search::Xapian::Stem->new("english");
+               }
+       }
+       $tg->set_stemmer($stemmer);
+       $tg->set_document($doc);
+       $tg->index_text($params{page}, 2);
+       $tg->index_text($caption, 2);
+       $tg->index_text($title, 2) if $title ne $caption;
+       $tg->index_text($toindex);
+       $tg->index_text(lc($title), 1, "S"); # for title:foo
+       foreach my $link (@{$links{$params{page}}}) {
+               $tg->index_text(lc($link), 1, "XLINK"); # for link:bar
        }
 
-       return $params{content};
-} #}}}
+       $doc->add_term($pageterm);
+       $db->replace_document_by_term($pageterm, $doc);
+}
 
-sub delete (@) { #{{{
+sub delete (@) {
        my $db=xapiandb();
        foreach my $page (@_) {
-               $db->delete_document_by_term(pageterm($page));
+               my $pageterm=pageterm(pagename($page));
+               $db->delete_document_by_term($pageterm) if defined $pageterm;
        }
-} #}}}
+}
 
-sub cgi ($) { #{{{
+sub cgi ($) {
        my $cgi=shift;
 
        if (defined $cgi->param('P')) {
@@ -138,20 +164,36 @@ sub cgi ($) { #{{{
                chdir("$config{wikistatedir}/xapian") || error("chdir: $!");
                $ENV{OMEGA_CONFIG_FILE}="./omega.conf";
                $ENV{CGIURL}=$config{cgiurl},
+               IkiWiki::loadindex();
+               $ENV{HELPLINK}=htmllink("", "", "ikiwiki/searching",
+                       noimageinline => 1, linktext => "Help");
                exec($config{omega_cgi}) || error("$config{omega_cgi} failed: $!");
        }
                exec($config{omega_cgi}) || error("$config{omega_cgi} failed: $!");
        }
-} #}}}
+}
 
 
+sub pageterm ($) {
        my $page=shift;
 
        my $page=shift;
 
-       # TODO: check if > 255 char page names overflow term
-       # length; use sha1 if so?
-       return "P".$page;
-} #}}}
+       # 240 is the number used by omindex to decide when to hash an
+       # overlong term. This does not use a compatible hash method though.
+       if (length $page > 240) {
+               eval q{use Digest::SHA1};
+               if ($@) {
+                       debug("search: ".sprintf(gettext("need Digest::SHA1 to index %s"), $page)) if $@;
+                       return undef;
+               }
+
+               # Note no colon, therefore it's guaranteed to not overlap
+               # with a page with the same name as the hash..
+               return "U".lc(Digest::SHA1::sha1_hex($page));
+       }
+       else {
+               return "U:".$page;
+       }
+}
 
 my $db;
-sub xapiandb () { #{{{
+sub xapiandb () {
        if (! defined $db) {
                eval q{
                        use Search::Xapian;
@@ -162,6 +204,21 @@ sub xapiandb () { #{{{
                        Search::Xapian::DB_CREATE_OR_OPEN());
        }
        return $db;
-} #}}}
+}
+
+{
+my $setup=0;
+sub setupfiles () {
+       if (! $setup and (! -e $config{wikistatedir}."/xapian" || $config{rebuild})) {
+               writefile("omega.conf", $config{wikistatedir}."/xapian",
+                       "database_dir .\n".
+                       "template_dir ./templates\n");
+               writefile("query", $config{wikistatedir}."/xapian/templates",
+                       IkiWiki::misctemplate(gettext("search"),
+                               readfile(IkiWiki::template_file("searchquery.tmpl"))));
+               $setup=1;
+       }
+}
+}
 
 1