From 9a48669f1e9d83422781f6f175c3be8bce80cd26 Mon Sep 17 00:00:00 2001
From: Joey Hess <joey@kodama.kitenet.net>
Date: Tue, 11 Nov 2008 15:40:04 -0500
Subject: [PATCH] avoid multiple ikiwiki cgi processes piling up, eating all
 memory, and thrashing

Fixed by making the cgi wrapper wait on a cgilock.
If you had to set apache's MaxClients low to avoid ikiwiki thrashing
your server, you can now turn it up to a high value.

The downside to this is that a cgi call that doesn't need to call lockwiki
will be serialised by this so only one can run at a time. (For example,
do=search.) There are few such calls, and since all of them call loadindex,
each still eats gobs of memory, so serialising them still seems ok.
---
 IkiWiki/Wrapper.pm | 15 +++++++++++++++
 1 file changed, 15 insertions(+)

diff --git a/IkiWiki/Wrapper.pm b/IkiWiki/Wrapper.pm
index 99237d3b5..d7d7f65b8 100644
--- a/IkiWiki/Wrapper.pm
+++ b/IkiWiki/Wrapper.pm
@@ -44,6 +44,7 @@ EOF
 	}
 
 	my $check_commit_hook="";
+	my $pre_exec="";
 	if ($config{post_commit}) {
 		# Optimise checking !commit_hook_enabled() , 
 		# so that ikiwiki does not have to be started if the
@@ -67,6 +68,19 @@ EOF
 	}
 EOF
 	}
+	elsif ($config{cgi}) {
+		# Avoid more than one ikiwiki cgi running at a time by
+		# taking a cgi lock. Since ikiwiki uses several MB of
+		# memory, a pile up of processes could cause thrashing
+		# otherwise.
+		$pre_exec=<<"EOF";
+	{
+		int fd=open("$config{wikistatedir}/cgilock", O_CREAT | O_RDWR, 0666);
+		if (fd != -1)
+			flock(fd, LOCK_EX);
+	}
+EOF
+	}
 
 	$Data::Dumper::Indent=0; # no newlines
 	my $configstring=Data::Dumper->Dump([\%config], ['*config']);
@@ -122,6 +136,7 @@ $envsave
 		exit(1);
 	}
 
+$pre_exec
 	execl("$this", "$this", NULL);
 	perror("exec $this");
 	exit(1);
-- 
2.39.5