X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/6b7d90d88a723296817de735905965c1a41184bc..b34d31142b9fed28ec9cf77fe0c5d9f405d48c84:/IkiWiki/Plugin/search.pm?ds=inline diff --git a/IkiWiki/Plugin/search.pm b/IkiWiki/Plugin/search.pm index 0d4a8f552..3f0b7c9ad 100644 --- a/IkiWiki/Plugin/search.pm +++ b/IkiWiki/Plugin/search.pm @@ -4,42 +4,52 @@ package IkiWiki::Plugin::search; use warnings; use strict; -use IkiWiki 2.00; +use IkiWiki 3.00; -sub import { #{{{ +sub import { + hook(type => "getsetup", id => "search", call => \&getsetup); hook(type => "checkconfig", id => "search", call => \&checkconfig); hook(type => "pagetemplate", id => "search", call => \&pagetemplate); - # run last so other needsbuild hooks can modify the list - hook(type => "needsbuild", id => "search", call => \&needsbuild, - last => 1); - hook(type => "filter", id => "search", call => \&filter); + hook(type => "indexhtml", id => "search", call => \&indexhtml); hook(type => "delete", id => "search", call => \&delete); hook(type => "cgi", id => "search", call => \&cgi); -} # }}} + hook(type => "disable", id => "search", call => \&disable); +} -sub checkconfig () { #{{{ +sub getsetup () { + return + plugin => { + safe => 1, + rebuild => 1, + section => "web", + }, + omega_cgi => { + type => "string", + example => "/usr/lib/cgi-bin/omega/omega", + description => "path to the omega cgi program", + safe => 0, # external program + rebuild => 0, + }, +} + +sub checkconfig () { foreach my $required (qw(url cgiurl)) { if (! length $config{$required}) { - error(sprintf(gettext("Must specify %s when using the search plugin"), $required)); + error(sprintf(gettext("Must specify %s when using the %s plugin"), $required, 'search')); } } - - if (! exists $config{omega_cgi}) { - $config{omega_cgi}="/usr/lib/cgi-bin/omega/omega"; - } - if (! -e $config{wikistatedir}."/xapian" || $config{rebuild}) { - writefile("omega.conf", $config{wikistatedir}."/xapian", - "database_dir .\n". - "template_dir ./templates\n"); - writefile("query", $config{wikistatedir}."/xapian/templates", - IkiWiki::misctemplate(gettext("search"), - readfile(IkiWiki::template_file("searchquery.tmpl")))); + if (! defined $config{omega_cgi}) { + $config{omega_cgi}="/usr/lib/cgi-bin/omega/omega"; } -} #}}} + + # This is a mass dependency, so if the search form template + # changes, every page is rebuilt. + add_depends("", "templates/searchform.tmpl"); +} my $form; -sub pagetemplate (@) { #{{{ +sub pagetemplate (@) { my %params=@_; my $page=$params{page}; my $template=$params{template}; @@ -48,110 +58,155 @@ sub pagetemplate (@) { #{{{ if ($template->query(name => "searchform")) { if (! defined $form) { my $searchform = template("searchform.tmpl", blind_cache => 1); - $searchform->param(searchaction => $config{cgiurl}); + $searchform->param(searchaction => IkiWiki::cgiurl()); + $searchform->param(html5 => $config{html5}); $form=$searchform->output; } $template->param(searchform => $form); } -} #}}} - -my %toindex; -sub needsbuild ($) { #{{{ - %toindex = map { pagename($_) => 1 } @{shift()}; -} #}}} +} my $scrubber; -sub filter (@) { #{{{ +my $stemmer; +sub indexhtml (@) { my %params=@_; + + setupfiles(); + + # A unique pageterm is used to identify the document for a page. 
+	my $pageterm=pageterm($params{page});
+	return unless defined $pageterm;
 
-	if ($params{page} eq $params{destpage} && $toindex{$params{page}}) {
-		# index page
-		my $db=xapiandb();
-		my $doc=Search::Xapian::Document->new();
-		my $title;
-		if (exists $pagestate{$params{page}}{meta} &&
-		    exists $pagestate{$params{page}}{meta}{title}) {
-			$title=$pagestate{$params{page}}{meta}{title};
-		}
-		else {
-			$title=IkiWiki::pagetitle($params{page});
-		}
+	my $db=xapiandb();
+	my $doc=Search::Xapian::Document->new();
+	my $caption=pagetitle($params{page});
+	my $title;
+	if (exists $pagestate{$params{page}}{meta} &&
+	    exists $pagestate{$params{page}}{meta}{title}) {
+		$title=$pagestate{$params{page}}{meta}{title};
+	}
+	else {
+		$title=$caption;
+	}
 
-		# Remove any html from text to be indexed.
-		# TODO: This removes html that is in eg, a markdown pre,
-		# which should not be removed.
-		if (! defined $scrubber) {
-			eval q{use HTML::Scrubber};
-			error($@) if $@;
+	# Remove html from text to be indexed.
+	if (! defined $scrubber) {
+		eval q{use HTML::Scrubber};
+		if (! $@) {
 			$scrubber=HTML::Scrubber->new(allow => []);
-		}
-		my $toindex=$scrubber->scrub($params{content});
-
-		# Take 512 characters for a sample, then extend it out
-		# if it stopped in the middle of a word.
-		my $size=512;
-		my ($sample)=substr($toindex, 0, $size);
-		my $next=substr($toindex, $size++, 1);
-		while ($next !~ /\s/) {
+		}
+	}
+	my $toindex = defined $scrubber ? $scrubber->scrub($params{content}) : $params{content};
+
+	# Take 512 characters for a sample, then extend it out
+	# if it stopped in the middle of a word.
+	my $size=512;
+	my ($sample)=substr($toindex, 0, $size);
+	if (length($sample) == $size) {
+		my $max=length($toindex);
+		my $next;
+		while ($size < $max &&
+		       ($next=substr($toindex, $size++, 1)) !~ /\s/) {
 			$sample.=$next;
-			$next=substr($toindex, $size++, 1);
 		}
-		$sample=~s/\n/ /g;
-
-		# data used by omega
-		$doc->set_data(
-			"url=".urlto($params{page}, "")."\n".
-			"sample=$sample\n".
-			"caption=$title\n".
-			"modtime=$IkiWiki::pagemtime{$params{page}}\n".
-			"size=".length($params{content})."\n"
-		);
+	}
+	$sample=~s/\n/ /g;
+
+	my $url=urlto($params{destpage}, "");
+	if (defined $pagestate{$params{page}}{meta}{permalink}) {
+		$url=$pagestate{$params{page}}{meta}{permalink}
+	}
+
+	# data used by omega
+	# Decode html entities in it, since omega re-encodes them.
+	eval q{use HTML::Entities};
+	error $@ if $@;
+	$doc->set_data(
+		"url=".$url."\n".
+		"sample=".decode_entities($sample)."\n".
+		"caption=".decode_entities($caption)."\n".
+		"modtime=$IkiWiki::pagemtime{$params{page}}\n".
+		"size=".length($params{content})."\n"
+	);
 
-		my $tg = Search::Xapian::TermGenerator->new();
-		$tg->set_stemmer(new Search::Xapian::Stem("english"));
-		$tg->set_document($doc);
-		$tg->index_text($params{page}, 2);
-		$tg->index_text($title, 2);
-		$tg->index_text($toindex);
+	# Index document and add terms for other metadata.
+	my $tg = Search::Xapian::TermGenerator->new();
+	if (! $stemmer) {
+		my $langcode=$ENV{LANG} || "en";
+		$langcode=~s/_.*//;
 
-		my $pageterm=pageterm($params{page});
-		$doc->add_term($pageterm);
-		$db->replace_document_by_term($pageterm, $doc);
+		# This whitelist is here to work around a xapian bug (#486138)
+		my @whitelist=qw{da de en es fi fr hu it no pt ru ro sv tr};
+
+		if (grep { $_ eq $langcode } @whitelist) {
+			$stemmer=Search::Xapian::Stem->new($langcode);
+		}
+		else {
+			$stemmer=Search::Xapian::Stem->new("english");
+		}
+	}
+	$tg->set_stemmer($stemmer);
+	$tg->set_document($doc);
+	$tg->index_text($params{page}, 2);
+	$tg->index_text($caption, 2);
+	$tg->index_text($title, 2) if $title ne $caption;
+	$tg->index_text($toindex);
+	$tg->index_text(lc($title), 1, "S"); # for title:foo
+	foreach my $link (@{$links{$params{page}}}) {
+		$tg->index_text(lc($link), 1, "XLINK"); # for link:bar
 	}
 
-	return $params{content};
-} #}}}
+	$doc->add_term($pageterm);
+	$db->replace_document_by_term($pageterm, $doc);
+}
 
-sub delete (@) { #{{{
+sub delete (@) {
 	my $db=xapiandb();
 	foreach my $page (@_) {
-		$db->delete_document_by_term(pageterm($page));
+		my $pageterm=pageterm(pagename($page));
+		$db->delete_document_by_term($pageterm) if defined $pageterm;
 	}
-} #}}}
+}
 
-sub cgi ($) { #{{{
+sub cgi ($) {
 	my $cgi=shift;
 
 	if (defined $cgi->param('P')) {
 		# only works for GET requests
 		chdir("$config{wikistatedir}/xapian") || error("chdir: $!");
 		$ENV{OMEGA_CONFIG_FILE}="./omega.conf";
-		$ENV{CGIURL}=$config{cgiurl},
+		$ENV{CGIURL}=IkiWiki::cgiurl();
+		IkiWiki::loadindex();
+		$ENV{HELPLINK}=htmllink("", "", "ikiwiki/searching",
+			noimageinline => 1, linktext => "Help");
 		exec($config{omega_cgi}) || error("$config{omega_cgi} failed: $!");
 	}
-} #}}}
+}
 
-sub pageterm ($) { #{{{
+sub pageterm ($) {
 	my $page=shift;
 
-	# TODO: check if > 255 char page names overflow term
-	# length; use sha1 if so?
-	return "P".$page;
-} #}}}
+	# 240 is the number used by omindex to decide when to hash an
+	# overlong term. This does not use a compatible hash method though.
+	if (length $page > 240) {
+		eval q{use Digest::SHA};
+		if ($@) {
+			debug("search: ".sprintf(gettext("need Digest::SHA to index %s"), $page)) if $@;
+			return undef;
+		}
+
+		# Note no colon, therefore it's guaranteed to not overlap
+		# with a page with the same name as the hash..
+		return "U".lc(Digest::SHA::sha1_hex($page));
+	}
+	else {
+		return "U:".$page;
+	}
+}
 
 my $db;
-sub xapiandb () { #{{{
+sub xapiandb () {
 	if (! defined $db) {
 		eval q{
 			use Search::Xapian;
@@ -162,6 +217,40 @@ sub xapiandb () { #{{{
 			Search::Xapian::DB_CREATE_OR_OPEN());
 	}
 	return $db;
-} #}}}
+}
+
+{
+my $setup=0;
+sub setupfiles () {
+	if (! $setup and (! -e $config{wikistatedir}."/xapian" || $config{rebuild})) {
+		writefile("omega.conf", $config{wikistatedir}."/xapian",
+			"database_dir .\n".
+			"template_dir ./templates\n");
+
+		# Avoid omega interpreting anything in the cgitemplate
+		# as an omegascript command.
+		eval q{use IkiWiki::CGI};
+		my $template=IkiWiki::cgitemplate(undef, gettext("search"), "\0",
+			searchform => "", # avoid showing the small search form
+		);
+		eval q{use HTML::Entities};
+		error $@ if $@;
+		$template=encode_entities($template, '\$');
+
+		my $querytemplate=readfile(IkiWiki::template_file("searchquery.tmpl"));
+		$template=~s/\0/$querytemplate/;
+
+		writefile("query", $config{wikistatedir}."/xapian/templates",
+			$template);
+		$setup=1;
+	}
+}
+}
+
+sub disable () {
+	if (-d $config{wikistatedir}."/xapian") {
+		system("rm", "-rf", $config{wikistatedir}."/xapian");
+	}
+}
 
 1
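
The rewritten pageterm() borrows the 240-character threshold that omindex uses for overlong terms (though, as its comment notes, not a compatible hash method): names up to 240 characters are stored literally behind a "U:" prefix, while longer names are SHA1-hashed and prefixed with a bare "U", so the two namespaces cannot collide. A standalone sketch of that mapping follows; demo_pageterm is a hypothetical name used only for illustration, and it assumes Digest::SHA is available rather than degrading gracefully the way the plugin does.

	#!/usr/bin/perl
	# Illustrative re-implementation of the pageterm() mapping shown in the patch.
	use warnings;
	use strict;
	use Digest::SHA;

	sub demo_pageterm {
		my $page=shift;
		if (length $page > 240) {
			# overlong name: bare "U" plus the lowercased SHA1 hex digest
			return "U".lc(Digest::SHA::sha1_hex($page));
		}
		# normal name: "U:" plus the page name itself
		return "U:".$page;
	}

	print demo_pageterm("index"), "\n";    # U:index
	print demo_pageterm("a" x 300), "\n";  # U followed by 40 hex digits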
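The getsetup stanza introduced by this patch registers a single option, omega_cgi, alongside the url and cgiurl settings that checkconfig already requires. As a rough sketch of how those options end up in an ikiwiki setup file (the URLs below are placeholders and the omega path is simply the example value from getsetup; adjust both for the target system):

	# fragment of an ikiwiki setup file -- illustrative values only
	add_plugins => [qw{search}],
	url => "http://example.com/wiki/",
	cgiurl => "http://example.com/ikiwiki.cgi",
	omega_cgi => "/usr/lib/cgi-bin/omega/omega",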