X-Git-Url: http://git.vanrenterghem.biz/git.ikiwiki.info.git/blobdiff_plain/8be2b60aaca495b84c299cf69ee8958603bd7204..2b0c8d167e8b98d11e3504393fe4b98596dfd891:/IkiWiki/Plugin/search.pm?ds=inline

diff --git a/IkiWiki/Plugin/search.pm b/IkiWiki/Plugin/search.pm
index 9bf223cf0..d79e3170e 100644
--- a/IkiWiki/Plugin/search.pm
+++ b/IkiWiki/Plugin/search.pm
@@ -1,43 +1,49 @@
 #!/usr/bin/perl
-# hyperestraier search engine plugin
+# xapian-omega search engine plugin
 package IkiWiki::Plugin::search;
 
 use warnings;
 use strict;
-use IkiWiki 2.00;
-
-sub import { #{{{
-	hook(type => "getopt", id => "hyperestraier",
-		call => \&getopt);
-	hook(type => "checkconfig", id => "hyperestraier",
-		call => \&checkconfig);
-	hook(type => "pagetemplate", id => "hyperestraier",
-		call => \&pagetemplate);
-	hook(type => "delete", id => "hyperestraier",
-		call => \&delete);
-	hook(type => "change", id => "hyperestraier",
-		call => \&change);
-	hook(type => "cgi", id => "hyperestraier",
-		call => \&cgi);
-} # }}}
-
-sub getopt () { #{{{
-	eval q{use Getopt::Long};
-	error($@) if $@;
-	Getopt::Long::Configure('pass_through');
-	GetOptions("estseek=s" => \$config{estseek});
-} #}}}
-
-sub checkconfig () { #{{{
+use IkiWiki 3.00;
+
+sub import {
+	hook(type => "getsetup", id => "search", call => \&getsetup);
+	hook(type => "checkconfig", id => "search", call => \&checkconfig);
+	hook(type => "pagetemplate", id => "search", call => \&pagetemplate);
+	hook(type => "postscan", id => "search", call => \&index);
+	hook(type => "delete", id => "search", call => \&delete);
+	hook(type => "cgi", id => "search", call => \&cgi);
+}
+
+sub getsetup () {
+	return
+		plugin => {
+			safe => 1,
+			rebuild => 1,
+		},
+		omega_cgi => {
+			type => "string",
+			example => "/usr/lib/cgi-bin/omega/omega",
+			description => "path to the omega cgi program",
+			safe => 0, # external program
+			rebuild => 0,
+		},
+}
+
+sub checkconfig () {
 	foreach my $required (qw(url cgiurl)) {
 		if (! length $config{$required}) {
 			error(sprintf(gettext("Must specify %s when using the search plugin"), $required));
 		}
 	}
-} #}}}
+
+	if (! defined $config{omega_cgi}) {
+		$config{omega_cgi}="/usr/lib/cgi-bin/omega/omega";
+	}
+}
 
 my $form;
-sub pagetemplate (@) { #{{{
+sub pagetemplate (@) {
 	my %params=@_;
 	my $page=$params{page};
 	my $template=$params{template};
@@ -52,97 +58,167 @@ sub pagetemplate (@) { #{{{
 
 		$template->param(searchform => $form);
 	}
-} #}}}
-
-sub delete (@) { #{{{
-	debug(gettext("cleaning hyperestraier search index"));
-	estcmd("purge -cl");
-	estcfg();
-} #}}}
-
-sub change (@) { #{{{
-	debug(gettext("updating hyperestraier search index"));
-	estcmd("gather -cm -bc -cl -sd",
-		map {
-			map {
-				Encode::encode_utf8($config{destdir}."/".$_)
-			} @{$renderedfiles{pagename($_)}};
-		} @_
-	);
-	estcfg();
-} #}}}
+}
 
-sub cgi ($) { #{{{
-	my $cgi=shift;
+my $scrubber;
+my $stemmer;
+sub index (@) {
+	my %params=@_;
 
-	if (defined $cgi->param('phrase') || defined $cgi->param("navi")) {
-		# only works for GET requests
-		chdir("$config{wikistatedir}/hyperestraier") || error("chdir: $!");
-		exec("./".IkiWiki::basename($config{cgiurl})) || error("estseek.cgi failed");
+	setupfiles();
+
+	# A unique pageterm is used to identify the document for a page.
+	my $pageterm=pageterm($params{page});
+	return $params{content} unless defined $pageterm;
+
+	my $db=xapiandb();
+	my $doc=Search::Xapian::Document->new();
+	my $caption=pagetitle($params{page});
+	my $title;
+	if (exists $pagestate{$params{page}}{meta} &&
+	    exists $pagestate{$params{page}}{meta}{title}) {
+		$title=$pagestate{$params{page}}{meta}{title};
+	}
+	else {
+		$title=$caption;
 	}
-} #}}}
 
-my $configured=0;
-sub estcfg () { #{{{
-	return if $configured;
-	$configured=1;
+	# Remove html from text to be indexed.
+	if (! defined $scrubber) {
+		eval q{use HTML::Scrubber};
+		if (! $@) {
+			$scrubber=HTML::Scrubber->new(allow => []);
+		}
+	}
+	my $toindex = defined $scrubber ? $scrubber->scrub($params{content}) : $params{content};
 
-	my $estdir="$config{wikistatedir}/hyperestraier";
-	my $cgi=IkiWiki::basename($config{cgiurl});
-	$cgi=~s/\..*$//;
-
-	my $newfile="$estdir/$cgi.tmpl.new";
-	my $cleanup = sub { unlink($newfile) };
-	open(TEMPLATE, ">:utf8", $newfile) || error("open $newfile: $!", $cleanup);
-	print TEMPLATE IkiWiki::misctemplate("search",
-		"<!--ESTFORM-->\n\n<!--ESTRESULT-->\n\n<!--ESTINFO-->\n\n",
-		forcebaseurl => IkiWiki::dirname($config{cgiurl})."/") ||
-		error("write $newfile: $!", $cleanup);
-	close TEMPLATE || error("save $newfile: $!", $cleanup);
-	rename($newfile, "$estdir/$cgi.tmpl") ||
-		error("rename $newfile: $!", $cleanup);
+	# Take 512 characters for a sample, then extend it out
+	# if it stopped in the middle of a word.
+	my $size=512;
+	my ($sample)=substr($toindex, 0, $size);
+	if (length($sample) == $size) {
+		my $max=length($toindex);
+		my $next;
+		while ($size < $max &&
+		       ($next=substr($toindex, $size++, 1)) !~ /\s/) {
+			$sample.=$next;
+		}
+	}
+	$sample=~s/\n/ /g;
 
-	$newfile="$estdir/$cgi.conf";
-	open(TEMPLATE, ">$newfile") || error("open $newfile: $!", $cleanup);
-	my $template=template("estseek.conf");
-	eval q{use Cwd 'abs_path'};
-	$template->param(
-		index => $estdir,
-		tmplfile => "$estdir/$cgi.tmpl",
-		destdir => abs_path($config{destdir}),
-		url => $config{url},
+	# data used by omega
+	# Decode html entities in it, since omega re-encodes them.
+	eval q{use HTML::Entities};
+	$doc->set_data(
+		"url=".urlto($params{page}, "")."\n".
+		"sample=".decode_entities($sample)."\n".
+		"caption=".decode_entities($caption)."\n".
+		"modtime=$IkiWiki::pagemtime{$params{page}}\n".
+		"size=".length($params{content})."\n"
 	);
-	print TEMPLATE $template->output || error("write $newfile: $!", $cleanup);
-	close TEMPLATE || error("save $newfile: $!", $cleanup);
-	rename($newfile, "$estdir/$cgi.conf") ||
-		error("rename $newfile: $!", $cleanup);
-
-	$cgi="$estdir/".IkiWiki::basename($config{cgiurl});
-	unlink($cgi);
-	my $estseek = defined $config{estseek} ? $config{estseek} : '/usr/lib/estraier/estseek.cgi';
-	symlink($estseek, $cgi) || error("symlink $estseek $cgi: $!");
-} # }}}
-
-sub estcmd ($;@) { #{{{
-	my @params=split(' ', shift);
-	push @params, "-cl", "$config{wikistatedir}/hyperestraier";
-	if (@_) {
-		push @params, "-";
+
+	# Index document and add terms for other metadata.
+	my $tg = Search::Xapian::TermGenerator->new();
+	if (! $stemmer) {
+		my $langcode=$ENV{LANG} || "en";
+		$langcode=~s/_.*//;
+
+		# This whitelist is here to work around a xapian bug (#486138)
+		my @whitelist=qw{da de en es fi fr hu it no pt ru ro sv tr};
+
+		if (grep { $_ eq $langcode } @whitelist) {
+			$stemmer=Search::Xapian::Stem->new($langcode);
+		}
+		else {
+			$stemmer=Search::Xapian::Stem->new("english");
+		}
 	}
-
-	my $pid=open(CHILD, "|-");
-	if ($pid) {
-		# parent
-		foreach (@_) {
-			print CHILD "$_\n";
+	$tg->set_stemmer($stemmer);
+	$tg->set_document($doc);
+	$tg->index_text($params{page}, 2);
+	$tg->index_text($caption, 2);
+	$tg->index_text($title, 2) if $title ne $caption;
+	$tg->index_text($toindex);
+	$tg->index_text(lc($title), 1, "S"); # for title:foo
+	foreach my $link (@{$links{$params{page}}}) {
+		$tg->index_text(lc($link), 1, "XLINK"); # for link:bar
+	}
+
+	$doc->add_term($pageterm);
+	$db->replace_document_by_term($pageterm, $doc);
+}
+
+sub delete (@) {
+	my $db=xapiandb();
+	foreach my $page (@_) {
+		my $pageterm=pageterm(pagename($page));
+		$db->delete_document_by_term($pageterm) if defined $pageterm;
+	}
+}
+
+sub cgi ($) {
+	my $cgi=shift;
+
+	if (defined $cgi->param('P')) {
+		# only works for GET requests
+		chdir("$config{wikistatedir}/xapian") || error("chdir: $!");
+		$ENV{OMEGA_CONFIG_FILE}="./omega.conf";
+		$ENV{CGIURL}=$config{cgiurl};
+		IkiWiki::loadindex();
+		$ENV{HELPLINK}=htmllink("", "", "ikiwiki/searching",
+			noimageinline => 1, linktext => "Help");
+		exec($config{omega_cgi}) || error("$config{omega_cgi} failed: $!");
+	}
+}
+
+sub pageterm ($) {
+	my $page=shift;
+
+	# 240 is the number used by omindex to decide when to hash an
+	# overlong term. This does not use a compatible hash method though.
+	if (length $page > 240) {
+		eval q{use Digest::SHA1};
+		if ($@) {
+			debug("search: ".sprintf(gettext("need Digest::SHA1 to index %s"), $page));
+			return undef;
 		}
-		close(CHILD) || print STDERR "estcmd @params exited nonzero: $?\n";
+
+		# Note no colon, therefore it's guaranteed to not overlap
+		# with a page with the same name as the hash.
+		return "U".lc(Digest::SHA1::sha1_hex($page));
 	}
 	else {
-		# child
-		open(STDOUT, "/dev/null"); # shut it up (closing won't work)
-		exec("estcmd", @params) || error("can't run estcmd");
+		return "U:".$page;
+	}
+}
+
+my $db;
+sub xapiandb () {
+	if (! defined $db) {
+		eval q{
+			use Search::Xapian;
+			use Search::Xapian::WritableDatabase;
+		};
+		error($@) if $@;
+		$db=Search::Xapian::WritableDatabase->new($config{wikistatedir}."/xapian/default",
+			Search::Xapian::DB_CREATE_OR_OPEN());
+	}
+	return $db;
+}
+
+{
+my $setup=0;
+sub setupfiles () {
+	if (! $setup and (! -e $config{wikistatedir}."/xapian" || $config{rebuild})) {
+		writefile("omega.conf", $config{wikistatedir}."/xapian",
+			"database_dir .\n".
+			"template_dir ./templates\n");
+		writefile("query", $config{wikistatedir}."/xapian/templates",
+			IkiWiki::misctemplate(gettext("search"),
				readfile(IkiWiki::template_file("searchquery.tmpl"))));
+		$setup=1;
 	}
-} #}}}
+}
+}
 
 1
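
Not part of the diff above, but possibly useful next to it: a minimal sketch of how the xapian database that index() writes can be queried outside the CGI, using the same Search::Xapian bindings the plugin loads. The database path is an assumption (wikistatedir defaults to .ikiwiki under the srcdir); adjust it for your wiki, and pass a plain lowercase search term as the first argument.

#!/usr/bin/perl
# Sketch only: read the index built by IkiWiki::Plugin::search.
# The path below is an assumption based on the default wikistatedir.
use warnings;
use strict;
use Search::Xapian;

my $term=shift || "ikiwiki";
my $db=Search::Xapian::Database->new(".ikiwiki/xapian/default");
my $enq=$db->enquire($term);
foreach my $match ($enq->matches(0, 10)) {
	# get_data returns the key=value block stored by index() for omega.
	my %data=map { split(/=/, $_, 2) } split(/\n/, $match->get_document->get_data);
	printf "%3d%% %s (%s)\n", $match->get_percent, $data{caption}, $data{url};
}

A stemmed, prefix-aware query (title:foo, link:bar, matching the S and XLINK terms added above) would go through Search::Xapian::QueryParser instead, which is roughly what omega does with the P parameter.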